insights: extend insight series API for timeout alerts (#43452)

coury-clark 2022-11-09 10:56:02 -07:00 committed by GitHub
parent be11d1d632
commit 587df140f8
12 changed files with 386 additions and 11 deletions

View File

@ -87,6 +87,7 @@ type InsightStatusResolver interface {
FailedJobs(context.Context) (int32, error)
BackfillQueuedAt(context.Context) *gqlutil.DateTime
IsLoadingData(context.Context) (*bool, error)
IncompleteDatapoints(context.Context) ([]IncompleteDatapointAlert, error)
}
type InsightsPointsArgs struct {
@ -450,3 +451,12 @@ type SearchInsightLivePreviewSeriesResolver interface {
Points(ctx context.Context) ([]InsightsDataPointResolver, error)
Label(ctx context.Context) (string, error)
}
type IncompleteDatapointAlert interface {
ToTimeoutDatapointAlert() (TimeoutDatapointAlert, bool)
Time() gqlutil.DateTime
}
type TimeoutDatapointAlert interface {
Time() gqlutil.DateTime
}

View File

@ -172,6 +172,31 @@ type InsightSeriesStatus {
Indicates if the series is currently loading additional data
"""
isLoadingData: Boolean
"""
Data points that are flagged as terminally incomplete for this series.
"""
incompleteDatapoints: [IncompleteDatapointAlert!]!
}
"""
Represents a terminally incomplete data point at a specific time, and optionally for a specific repository.
"""
interface IncompleteDatapointAlert {
"""
The time of the incomplete data point.
"""
time: DateTime!
}
"""
Represents a terminally incomplete data point at a specific time, and optionally for a specific repository.
"""
type TimeoutDatapointAlert implements IncompleteDatapointAlert {
"""
The data point that is incomplete.
"""
time: DateTime!
}
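As a consumer-facing sanity check, the new field can be selected through the interface with an inline fragment. Below is a minimal sketch of such a query, held in a Go string the way GraphQL integration tests commonly do; the enclosing `insightViews`/`dataSeries`/`status` selection path is an assumption about the existing schema, and only `incompleteDatapoints` and `TimeoutDatapointAlert` come from this change.

// Hypothetical client query; only incompleteDatapoints and
// TimeoutDatapointAlert are introduced by this commit, the surrounding
// selection path is assumed from the existing schema.
const incompleteDatapointsQuery = `
query InsightSeriesTimeouts {
	insightViews {
		nodes {
			dataSeries {
				status {
					incompleteDatapoints {
						__typename
						time
						... on TimeoutDatapointAlert {
							time
						}
					}
				}
			}
		}
	}
}`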
extend type Query {

View File

@ -76,3 +76,7 @@ func (e emptyInsightStatusResolver) BackfillQueuedAt(ctx context.Context) *gqlut
current := time.Now().AddDate(-1, 0, 0)
return gqlutil.DateTimeOrNil(&current)
}
func (e emptyInsightStatusResolver) IncompleteDatapoints(ctx context.Context) (resolvers []graphqlbackend.IncompleteDatapointAlert, err error) {
return nil, nil
}

View File

@ -89,15 +89,18 @@ type statusInfo struct {
totalPoints, pendingJobs, completedJobs, failedJobs int32
backfillQueuedAt *time.Time
isLoading bool
incompleteDatapoints []store.IncompleteDatapoint
}
type GetSeriesQueueStatusFunc func(ctx context.Context, seriesID string) (*queryrunner.JobsStatus, error)
type GetSeriesBackfillsFunc func(ctx context.Context, seriesID int) ([]scheduler.SeriesBackfill, error)
type GetIncompleteDatapointsFunc func(ctx context.Context, seriesID int) ([]store.IncompleteDatapoint, error)
type insightStatusResolver struct {
getQueueStatus GetSeriesQueueStatusFunc
getSeriesBackfills GetSeriesBackfillsFunc
statusOnce sync.Once
series types.InsightViewSeries
getQueueStatus GetSeriesQueueStatusFunc
getSeriesBackfills GetSeriesBackfillsFunc
getIncompleteDatapoints GetIncompleteDatapointsFunc
statusOnce sync.Once
series types.InsightViewSeries
status statusInfo
statusErr error
@ -167,14 +170,18 @@ func NewStatusResolver(r *baseInsightResolver, viewSeries types.InsightViewSerie
backfillStore := scheduler.NewBackfillStore(r.insightsDB)
return backfillStore.LoadSeriesBackfills(ctx, seriesID)
}
return newStatusResolver(getStatus, getBackfills, viewSeries)
getIncompletes := func(ctx context.Context, seriesID int) ([]store.IncompleteDatapoint, error) {
return r.timeSeriesStore.LoadAggregatedIncompleteDatapoints(ctx, seriesID)
}
return newStatusResolver(getStatus, getBackfills, getIncompletes, viewSeries)
}
func newStatusResolver(getQueueStatus GetSeriesQueueStatusFunc, getSeriesBackfills GetSeriesBackfillsFunc, series types.InsightViewSeries) *insightStatusResolver {
func newStatusResolver(getQueueStatus GetSeriesQueueStatusFunc, getSeriesBackfills GetSeriesBackfillsFunc, getIncompleteDatapoints GetIncompleteDatapointsFunc, series types.InsightViewSeries) *insightStatusResolver {
return &insightStatusResolver{
getQueueStatus: getQueueStatus,
getSeriesBackfills: getSeriesBackfills,
series: series,
getQueueStatus: getQueueStatus,
getSeriesBackfills: getSeriesBackfills,
series: series,
getIncompleteDatapoints: getIncompleteDatapoints,
}
}
@ -521,3 +528,46 @@ func streamingSeriesJustInTime(ctx context.Context, definition types.InsightView
return resolvers, nil
}
var _ graphqlbackend.TimeoutDatapointAlert = &timeoutDatapointAlertResolver{}
var _ graphqlbackend.IncompleteDatapointAlert = &IncompleteDataPointAlertResolver{}
type IncompleteDataPointAlertResolver struct {
point store.IncompleteDatapoint
}
func (i *IncompleteDataPointAlertResolver) ToTimeoutDatapointAlert() (graphqlbackend.TimeoutDatapointAlert, bool) {
if i.point.Reason == store.ReasonTimeout {
return &timeoutDatapointAlertResolver{point: i.point}, true
}
return nil, false
}
func (i *IncompleteDataPointAlertResolver) Time() gqlutil.DateTime {
return gqlutil.DateTime{Time: i.point.Time}
}
type timeoutDatapointAlertResolver struct {
point store.IncompleteDatapoint
baseInsightResolver
}
func (t *timeoutDatapointAlertResolver) Time() gqlutil.DateTime {
return gqlutil.DateTime{Time: t.point.Time}
}
func (i *insightStatusResolver) IncompleteDatapoints(ctx context.Context) (resolvers []graphqlbackend.IncompleteDatapointAlert, err error) {
incomplete, err := i.getIncompleteDatapoints(ctx, i.series.InsightSeriesID)
if err != nil {
return nil, err
}
for _, point := range incomplete {
resolvers = append(resolvers, &IncompleteDataPointAlertResolver{point: point})
}
return resolvers, nil
}
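On the Go side, graphql-go resolves the `IncompleteDatapointAlert` interface through the `To*` convention above: each concrete GraphQL type gets a `ToX() (X, bool)` method, and exactly one returns true. Below is a minimal sketch of a caller branching on the concrete type, assuming a resolver built with `newStatusResolver` and a `sourcegraph/log` logger; this helper is illustrative, not part of the commit.

// logTimeouts is a hypothetical helper: it walks the series' incomplete
// datapoints and reports only the timeout variant. Assumes imports of
// context, github.com/sourcegraph/log, and the graphqlbackend package.
func logTimeouts(ctx context.Context, status graphqlbackend.InsightStatusResolver, logger log.Logger) error {
	alerts, err := status.IncompleteDatapoints(ctx)
	if err != nil {
		return err
	}
	for _, alert := range alerts {
		if timeout, ok := alert.ToTimeoutDatapointAlert(); ok {
			// Timeout is the only reason today; future reasons would add
			// their own To* methods to the interface and branches here.
			logger.Warn("insight datapoint timed out", log.String("time", timeout.Time().String()))
		}
	}
	return nil
}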

View File

@ -10,7 +10,6 @@ import (
"github.com/stretchr/testify/require"
"github.com/sourcegraph/log/logtest"
"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend"
edb "github.com/sourcegraph/sourcegraph/enterprise/internal/database"
"github.com/sourcegraph/sourcegraph/enterprise/internal/insights/background/queryrunner"
@ -110,6 +109,12 @@ func fakeBackfillGetter(backfills []scheduler.SeriesBackfill, err error) GetSeri
return backfills, err
}
}
func fakeIncompleteGetter() GetIncompleteDatapointsFunc {
return func(ctx context.Context, seriesID int) ([]store.IncompleteDatapoint, error) {
return nil, nil
}
}
func TestInsightSeriesStatusResolver_IsLoadingData(t *testing.T) {
type isLoadingTestCase struct {
@ -193,7 +198,7 @@ func TestInsightSeriesStatusResolver_IsLoadingData(t *testing.T) {
t.Run(tc.want.Name(), func(t *testing.T) {
statusGetter := fakeStatusGetter(&tc.queueStatus, tc.queueErr)
backfillGetter := fakeBackfillGetter(tc.backfills, tc.backfillsErr)
statusResolver := newStatusResolver(statusGetter, backfillGetter, tc.series)
statusResolver := newStatusResolver(statusGetter, backfillGetter, fakeIncompleteGetter(), tc.series)
loading, err := statusResolver.IsLoadingData(context.Background())
var loadingResult bool
if loading != nil {
@ -209,3 +214,61 @@ func TestInsightSeriesStatusResolver_IsLoadingData(t *testing.T) {
}
}
func TestInsightStatusResolver_IncompleteDatapoints(t *testing.T) {
// Set up the databases, stores, and GraphQL resolver.
ctx := actor.WithInternalActor(context.Background())
now := time.Date(2020, 1, 1, 0, 0, 0, 0, time.UTC).Truncate(time.Microsecond)
logger := logtest.Scoped(t)
insightsDB := edb.NewInsightsDB(dbtest.NewInsightsDB(logger, t))
postgres := database.NewDB(logger, dbtest.NewDB(logger, t))
insightStore := store.NewInsightStore(insightsDB)
tss := store.New(insightsDB, store.NewInsightPermissionStore(postgres))
base := baseInsightResolver{
insightStore: insightStore,
timeSeriesStore: tss,
insightsDB: insightsDB,
postgresDB: postgres,
}
series, err := insightStore.CreateSeries(ctx, types.InsightSeries{
SeriesID: "asdf",
Query: "asdf",
SampleIntervalUnit: string(types.Month),
SampleIntervalValue: 1,
GenerationMethod: types.Search,
})
require.NoError(t, err)
repo := 5
addFakeIncomplete := func(in time.Time) {
err = tss.AddIncompleteDatapoint(ctx, store.AddIncompleteDatapointInput{
SeriesID: series.ID,
RepoID: &repo,
Reason: store.ReasonTimeout,
Time: in,
})
require.NoError(t, err)
}
resolver := NewStatusResolver(&base, types.InsightViewSeries{InsightSeriesID: series.ID})
addFakeIncomplete(now)
addFakeIncomplete(now)
addFakeIncomplete(now.AddDate(0, 0, 1))
stringify := func(input []graphqlbackend.IncompleteDatapointAlert) (res []string) {
for _, in := range input {
res = append(res, in.Time().String())
}
return res
}
t.Run("as timeout", func(t *testing.T) {
got, err := resolver.IncompleteDatapoints(ctx)
require.NoError(t, err)
autogold.Want("as timeout", []string{"2020-01-01 00:00:00 +0000 UTC", "2020-01-02 00:00:00 +0000 UTC"}).Equal(t, stringify(got))
})
}

View File

@ -779,3 +779,51 @@ func scanAll(rows *sql.Rows, scan scanFunc) (err error) {
}
return rows.Err()
}
// LoadAggregatedIncompleteDatapoints returns incomplete datapoints for a given series, aggregated by reason and time. This
// effectively removes any repository granularity from the result.
func (s *Store) LoadAggregatedIncompleteDatapoints(ctx context.Context, seriesID int) (results []IncompleteDatapoint, err error) {
if seriesID == 0 {
return nil, errors.New("invalid seriesID")
}
q := "select reason, time from insight_series_incomplete_points where series_id = %s group by reason, time;"
rows, err := s.Query(ctx, sqlf.Sprintf(q, seriesID))
if err != nil {
return nil, err
}
return results, scanAll(rows, func(sc scanner) (err error) {
var tmp IncompleteDatapoint
if err = sc.Scan(
&tmp.Reason,
&tmp.Time); err != nil {
return err
}
results = append(results, tmp)
return nil
})
}
type AddIncompleteDatapointInput struct {
SeriesID int
RepoID *int
Reason IncompleteReason
Time time.Time
}
func (s *Store) AddIncompleteDatapoint(ctx context.Context, input AddIncompleteDatapointInput) error {
q := "insert into insight_series_incomplete_points (series_id, repo_id, reason, time) values (%s, %s, %s, %s) on conflict do nothing;"
return s.Exec(ctx, sqlf.Sprintf(q, input.SeriesID, input.RepoID, input.Reason, input.Time))
}
type IncompleteDatapoint struct {
Reason IncompleteReason
RepoID *int
Time time.Time
}
type IncompleteReason string
const (
ReasonTimeout IncompleteReason = "timeout"
)
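The write and read paths of the new store API compose as follows. Below is a minimal sketch from a hypothetical caller outside the package, with `tss` (*Store), `seriesID`, and `ts` assumed to exist; the unique index together with ON CONFLICT DO NOTHING makes retried writes idempotent.

// recordTimeout is a hypothetical helper showing the intended flow:
// flag a datapoint as timed out, then read the aggregated alerts back.
func recordTimeout(ctx context.Context, tss *store.Store, seriesID int, ts time.Time) ([]store.IncompleteDatapoint, error) {
	repoID := 42 // illustrative repo ID; RepoID is nullable for series-level points
	input := store.AddIncompleteDatapointInput{
		SeriesID: seriesID,
		RepoID:   &repoID,
		Reason:   store.ReasonTimeout,
		Time:     ts,
	}
	// Safe to call repeatedly: the unique index on (series_id, reason,
	// time, repo_id) plus ON CONFLICT DO NOTHING deduplicates writes.
	if err := tss.AddIncompleteDatapoint(ctx, input); err != nil {
		return nil, err
	}
	// The aggregated read drops repo granularity: rows differing only by
	// repo_id collapse to a single (reason, time) entry.
	return tss.LoadAggregatedIncompleteDatapoints(ctx, seriesID)
}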

View File

@ -93,6 +93,15 @@
"Increment": 1,
"CycleOption": "NO"
},
{
"Name": "insight_series_incomplete_points_id_seq",
"TypeName": "integer",
"StartValue": 1,
"MinimumValue": 1,
"MaximumValue": 2147483647,
"Increment": 1,
"CycleOption": "NO"
},
{
"Name": "insight_view_grants_id_seq",
"TypeName": "integer",
@ -1227,6 +1236,109 @@
],
"Triggers": []
},
{
"Name": "insight_series_incomplete_points",
"Comment": "",
"Columns": [
{
"Name": "id",
"Index": 1,
"TypeName": "integer",
"IsNullable": false,
"Default": "nextval('insight_series_incomplete_points_id_seq'::regclass)",
"CharacterMaximumLength": 0,
"IsIdentity": false,
"IdentityGeneration": "",
"IsGenerated": "NEVER",
"GenerationExpression": "",
"Comment": ""
},
{
"Name": "reason",
"Index": 3,
"TypeName": "text",
"IsNullable": false,
"Default": "",
"CharacterMaximumLength": 0,
"IsIdentity": false,
"IdentityGeneration": "",
"IsGenerated": "NEVER",
"GenerationExpression": "",
"Comment": ""
},
{
"Name": "repo_id",
"Index": 5,
"TypeName": "integer",
"IsNullable": true,
"Default": "",
"CharacterMaximumLength": 0,
"IsIdentity": false,
"IdentityGeneration": "",
"IsGenerated": "NEVER",
"GenerationExpression": "",
"Comment": ""
},
{
"Name": "series_id",
"Index": 2,
"TypeName": "integer",
"IsNullable": false,
"Default": "",
"CharacterMaximumLength": 0,
"IsIdentity": false,
"IdentityGeneration": "",
"IsGenerated": "NEVER",
"GenerationExpression": "",
"Comment": ""
},
{
"Name": "time",
"Index": 4,
"TypeName": "timestamp without time zone",
"IsNullable": false,
"Default": "",
"CharacterMaximumLength": 0,
"IsIdentity": false,
"IdentityGeneration": "",
"IsGenerated": "NEVER",
"GenerationExpression": "",
"Comment": ""
}
],
"Indexes": [
{
"Name": "insight_series_incomplete_points_pk",
"IsPrimaryKey": true,
"IsUnique": true,
"IsExclusion": false,
"IsDeferrable": false,
"IndexDefinition": "CREATE UNIQUE INDEX insight_series_incomplete_points_pk ON insight_series_incomplete_points USING btree (id)",
"ConstraintType": "p",
"ConstraintDefinition": "PRIMARY KEY (id)"
},
{
"Name": "insight_series_incomplete_points_unique_idx",
"IsPrimaryKey": false,
"IsUnique": true,
"IsExclusion": false,
"IsDeferrable": false,
"IndexDefinition": "CREATE UNIQUE INDEX insight_series_incomplete_points_unique_idx ON insight_series_incomplete_points USING btree (series_id, reason, \"time\", repo_id)",
"ConstraintType": "",
"ConstraintDefinition": ""
}
],
"Constraints": [
{
"Name": "insight_series_incomplete_points_series_id_fk",
"ConstraintType": "f",
"RefTableName": "insight_series",
"IsDeferrable": false,
"ConstraintDefinition": "FOREIGN KEY (series_id) REFERENCES insight_series(id) ON DELETE CASCADE"
}
],
"Triggers": []
},
{
"Name": "insight_series_recording_times",
"Comment": "",

View File

@ -168,6 +168,7 @@ Referenced by:
TABLE "insight_dirty_queries" CONSTRAINT "insight_dirty_queries_insight_series_id_fkey" FOREIGN KEY (insight_series_id) REFERENCES insight_series(id) ON DELETE CASCADE
TABLE "insight_series_backfill" CONSTRAINT "insight_series_backfill_series_id_fk" FOREIGN KEY (series_id) REFERENCES insight_series(id) ON DELETE CASCADE
TABLE "insight_series_recording_times" CONSTRAINT "insight_series_id_fkey" FOREIGN KEY (insight_series_id) REFERENCES insight_series(id) ON DELETE CASCADE
TABLE "insight_series_incomplete_points" CONSTRAINT "insight_series_incomplete_points_series_id_fk" FOREIGN KEY (series_id) REFERENCES insight_series(id) ON DELETE CASCADE
TABLE "insight_view_series" CONSTRAINT "insight_view_series_insight_series_id_fkey" FOREIGN KEY (insight_series_id) REFERENCES insight_series(id)
```
@ -212,6 +213,23 @@ Referenced by:
```
# Table "public.insight_series_incomplete_points"
```
Column | Type | Collation | Nullable | Default
-----------+-----------------------------+-----------+----------+--------------------------------------------------------------
id | integer | | not null | nextval('insight_series_incomplete_points_id_seq'::regclass)
series_id | integer | | not null |
reason | text | | not null |
time | timestamp without time zone | | not null |
repo_id | integer | | |
Indexes:
"insight_series_incomplete_points_pk" PRIMARY KEY, btree (id)
"insight_series_incomplete_points_unique_idx" UNIQUE, btree (series_id, reason, "time", repo_id)
Foreign-key constraints:
"insight_series_incomplete_points_series_id_fk" FOREIGN KEY (series_id) REFERENCES insight_series(id) ON DELETE CASCADE
```
# Table "public.insight_series_recording_times"
```
Column | Type | Collation | Nullable | Default

View File

@ -0,0 +1 @@
DROP TABLE IF EXISTS insight_series_incomplete_points;

View File

@ -0,0 +1,2 @@
name: incomplete_points
parents: [1666632478]

View File

@ -0,0 +1,14 @@
CREATE TABLE IF NOT EXISTS insight_series_incomplete_points
(
id SERIAL CONSTRAINT insight_series_incomplete_points_pk PRIMARY KEY,
series_id INT NOT NULL,
reason TEXT NOT NULL,
time TIMESTAMP WITHOUT TIME ZONE NOT NULL,
repo_id INT,
CONSTRAINT insight_series_incomplete_points_series_id_fk
FOREIGN KEY (series_id) REFERENCES insight_series (id) ON DELETE CASCADE
);
CREATE UNIQUE INDEX IF NOT EXISTS insight_series_incomplete_points_unique_idx
ON insight_series_incomplete_points (series_id, reason, time, repo_id);

View File

@ -225,6 +225,24 @@ CREATE SEQUENCE insight_series_id_seq
ALTER SEQUENCE insight_series_id_seq OWNED BY insight_series.id;
CREATE TABLE insight_series_incomplete_points (
id integer NOT NULL,
series_id integer NOT NULL,
reason text NOT NULL,
"time" timestamp without time zone NOT NULL,
repo_id integer
);
CREATE SEQUENCE insight_series_incomplete_points_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER SEQUENCE insight_series_incomplete_points_id_seq OWNED BY insight_series_incomplete_points.id;
CREATE TABLE insight_series_recording_times (
insight_series_id integer,
recording_time timestamp with time zone,
@ -521,6 +539,8 @@ ALTER TABLE ONLY insight_series ALTER COLUMN id SET DEFAULT nextval('insight_ser
ALTER TABLE ONLY insight_series_backfill ALTER COLUMN id SET DEFAULT nextval('insight_series_backfill_id_seq'::regclass);
ALTER TABLE ONLY insight_series_incomplete_points ALTER COLUMN id SET DEFAULT nextval('insight_series_incomplete_points_id_seq'::regclass);
ALTER TABLE ONLY insight_view ALTER COLUMN id SET DEFAULT nextval('insight_view_id_seq'::regclass);
ALTER TABLE ONLY insight_view_grants ALTER COLUMN id SET DEFAULT nextval('insight_view_grants_id_seq'::regclass);
@ -556,6 +576,9 @@ ALTER TABLE ONLY insight_dirty_queries
ALTER TABLE ONLY insight_series_backfill
ADD CONSTRAINT insight_series_backfill_pk PRIMARY KEY (id);
ALTER TABLE ONLY insight_series_incomplete_points
ADD CONSTRAINT insight_series_incomplete_points_pk PRIMARY KEY (id);
ALTER TABLE ONLY insight_series
ADD CONSTRAINT insight_series_pkey PRIMARY KEY (id);
@ -607,6 +630,8 @@ CREATE INDEX insight_dirty_queries_insight_series_id_fk_idx ON insight_dirty_que
CREATE INDEX insight_series_deleted_at_idx ON insight_series USING btree (deleted_at);
CREATE UNIQUE INDEX insight_series_incomplete_points_unique_idx ON insight_series_incomplete_points USING btree (series_id, reason, "time", repo_id);
CREATE INDEX insight_series_next_recording_after_idx ON insight_series USING btree (next_recording_after);
CREATE UNIQUE INDEX insight_series_series_id_unique_idx ON insight_series USING btree (series_id);
@ -671,6 +696,9 @@ ALTER TABLE ONLY insight_series_backfill
ALTER TABLE ONLY insight_series_recording_times
ADD CONSTRAINT insight_series_id_fkey FOREIGN KEY (insight_series_id) REFERENCES insight_series(id) ON DELETE CASCADE;
ALTER TABLE ONLY insight_series_incomplete_points
ADD CONSTRAINT insight_series_incomplete_points_series_id_fk FOREIGN KEY (series_id) REFERENCES insight_series(id) ON DELETE CASCADE;
ALTER TABLE ONLY insight_view_grants
ADD CONSTRAINT insight_view_grants_insight_view_id_fk FOREIGN KEY (insight_view_id) REFERENCES insight_view(id) ON DELETE CASCADE;