feat(search): remove smart search logic (#64215)

This change removes the backend smart search logic. After this, searches
with smart search enabled (`sm=1`) will be executed in the default
'precise' mode (`sm=0`). For old searches that use `sm=1` and
`patterntype=standard`, it's possible that they will now return no
results.

Looking at telemetry, only 0.1% of searches on dot com trigger any smart
search rule. So this change should only affect a small percentage of
usage. To mitigate the impact on these rare cases, this PR adds an alert
whenever there are no results and smart search is enabled, suggesting
users switch to keyword search. (This will help in the majority of
cases, since the most frequent smart search rule rewrites literal
queries to use 'AND' between terms).

Closes SPLF-92
This commit is contained in:
Julie Tibshirani 2024-08-01 18:02:35 +03:00 committed by GitHub
parent 50dbc74fba
commit 5c5ed6ca27
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
66 changed files with 234 additions and 2194 deletions

View File

@ -44,7 +44,7 @@ func NewBatchSearchImplementer(ctx context.Context, logger log.Logger, db databa
if err != nil {
var queryErr *client.QueryError
if errors.As(err, &queryErr) {
return NewSearchAlertResolver(search.AlertForQuery(queryErr.Query, queryErr.Err)).wrapSearchImplementer(db), nil
return NewSearchAlertResolver(search.AlertForQuery(queryErr.Err)).wrapSearchImplementer(db), nil
}
return nil, err
}

View File

@ -438,7 +438,7 @@ func canAggregateByCaptureGroup(searchQuery, patternType string) (bool, *notAvai
if err != nil {
return false, &notAvailableReason{reason: cgInvalidQueryMsg, reasonType: types.INVALID_AGGREGATION_MODE_FOR_QUERY}, err
}
if !(searchType == query.SearchTypeRegex || searchType == query.SearchTypeStandard || searchType == query.SearchTypeLucky) {
if !(searchType == query.SearchTypeRegex || searchType == query.SearchTypeStandard) {
return false, &notAvailableReason{reason: cgInvalidQueryMsg, reasonType: types.INVALID_AGGREGATION_MODE_FOR_QUERY}, nil
}

View File

@ -134,7 +134,7 @@ func (h *streamHandler) serveHTTP(r *http.Request, tr trace.Trace, eventWriter *
if err != nil {
var queryErr *client.QueryError
if errors.As(err, &queryErr) {
eventWriter.Alert(search.AlertForQuery(queryErr.Query, queryErr.Err))
eventWriter.Alert(search.AlertForQuery(queryErr.Err))
return nil
} else {
return err
@ -201,7 +201,7 @@ func (h *streamHandler) serveHTTP(r *http.Request, tr trace.Trace, eventWriter *
}()
if err != nil && errors.HasType[*query.UnsupportedError](err) {
eventWriter.Alert(search.AlertForQuery(inputs.OriginalQuery, err))
eventWriter.Alert(search.AlertForQuery(err))
err = nil
}
if alert != nil {

View File

@ -26,7 +26,6 @@ func run(w io.Writer, args []string) error {
version := fs.String("version", "V3", "the version of the search API to use")
patternType := fs.String("pattern_type", "", "optionally specify query.PatternType (regex, literal, ...)")
smartSearch := fs.Bool("smart_search", false, "enable smart search mode instead of precise")
dotCom := fs.Bool("dotcom", false, "enable sourcegraph.com parsing rules")
fs.Parse(args[1:])
@ -37,9 +36,6 @@ func run(w io.Writer, args []string) error {
// Further argument parsing
query := fs.Arg(0)
mode := search.Precise
if *smartSearch {
mode = search.SmartSearch
}
// Sourcegraph infra we need
conf.Mock(&conf.Unified{})

View File

@ -110,12 +110,6 @@ func TestDetectSearchType(t *testing.T) {
"literal",
query.SearchTypeRegex,
},
{
"submit literal with patterntype",
"test patterntype:regexp",
"lucky",
query.SearchTypeRegex,
},
{
"submit structural with structural patterntype",
"[a] patterntype:structural",

View File

@ -179,18 +179,6 @@ func TestReplace_Valid(t *testing.T) {
want: autogold.Expect(BasicQuery("/replace(?:you)/")),
searchType: query.SearchTypeKeyword,
},
{
query: "/replaceme/",
replacement: "replace",
want: autogold.Expect(BasicQuery("/replace/")),
searchType: query.SearchTypeLucky,
},
{
query: "/replace(me)/",
replacement: "you",
want: autogold.Expect(BasicQuery("/replace(?:you)/")),
searchType: query.SearchTypeLucky,
},
{
query: "/b(u)tt(er)/",
replacement: "e",

View File

@ -73,8 +73,6 @@ func (q *QueryDescription) QueryString() string {
return q.Query + " patternType:literal"
case query.SearchTypeStructural:
return q.Query + " patternType:structural"
case query.SearchTypeLucky:
return q.Query
default:
panic("unreachable")
}
@ -82,8 +80,8 @@ func (q *QueryDescription) QueryString() string {
return q.Query
}
// AlertForQuery converts errors in the query to search alerts.
func AlertForQuery(queryString string, err error) *Alert {
// AlertForQuery converts errors in query parsing to search alerts.
func AlertForQuery(err error) *Alert {
if errors.HasType[*query.ExpectedOperand](err) {
return &Alert{
PrometheusType: "unsupported_and_or_query",
@ -98,6 +96,14 @@ func AlertForQuery(queryString string, err error) *Alert {
}
}
// AlertForSmartSearch returns the alert shown when a search with smart
// search enabled produced no results, suggesting the user retry with the
// default keyword pattern type instead.
func AlertForSmartSearch() *Alert {
return &Alert{
PrometheusType: "smart_search_no_results",
Title: "No results matched your search.",
Description: "To find more results, try your search again using the default `patterntype:keyword`.",
}
}
func AlertForTimeout(usedTime time.Duration, suggestTime time.Duration, queryString string, patternType query.SearchType) *Alert {
q, err := query.ParseLiteral(queryString) // Invariant: query is already validated; guard against error anyway.
if err != nil {

View File

@ -218,13 +218,6 @@ func (o *Observer) Done() (*search.Alert, error) {
return o.alert, o.err
}
type alertKind string
const (
smartSearchAdditionalResults alertKind = "smart-search-additional-results"
smartSearchPureResults alertKind = "smart-search-pure-results"
)
func (o *Observer) errorToAlert(ctx context.Context, err error) (*search.Alert, error) {
if err == nil {
return nil, nil
@ -238,7 +231,6 @@ func (o *Observer) errorToAlert(ctx context.Context, err error) (*search.Alert,
var (
mErr *searchrepos.MissingRepoRevsError
oErr *errOverRepoLimit
lErr *ErrLuckyQueries
)
if errors.HasType[authz.ErrStalePermissions](err) {
@ -271,24 +263,6 @@ func (o *Observer) errorToAlert(ctx context.Context, err error) (*search.Alert,
return a, nil
}
if errors.As(err, &lErr) {
title := "Also showing additional results"
description := "We returned all the results for your query. We also added results for similar queries that might interest you."
kind := string(smartSearchAdditionalResults)
if lErr.Type == LuckyAlertPure {
title = "No results for original query. Showing related results instead"
description = "The original query returned no results. Below are results for similar queries that might interest you."
kind = string(smartSearchPureResults)
}
return &search.Alert{
PrometheusType: "smart_search_notice",
Title: title,
Kind: kind,
Description: description,
ProposedQueries: lErr.ProposedQueries,
}, nil
}
if strings.Contains(err.Error(), "Worker_oomed") || strings.Contains(err.Error(), "Worker_exited_abnormally") {
return &search.Alert{
PrometheusType: "structural_search_needs_more_memory",
@ -351,22 +325,6 @@ func (e *errOverRepoLimit) Error() string {
return "Too many matching repositories"
}
// LuckyAlertType distinguishes how lucky (smart) search results relate to
// the results of the original query.
type LuckyAlertType int
const (
// LuckyAlertAdded: the original query returned results, and results for
// similar queries were added on top.
LuckyAlertAdded LuckyAlertType = iota
// LuckyAlertPure: the original query returned no results, so only
// results for similar queries are shown.
LuckyAlertPure
)
// ErrLuckyQueries signals that results come from rewritten ("lucky")
// queries. ProposedQueries lists the alternative queries that were run.
type ErrLuckyQueries struct {
Type LuckyAlertType
ProposedQueries []*search.QueryDescription
}
func (e *ErrLuckyQueries) Error() string {
return "Showing results for lucky search"
}
// isContextError returns true if ctx.Err() is not nil or if err
// is an error caused by context cancelation or timeout.
func isContextError(ctx context.Context, err error) bool {

View File

@ -211,7 +211,7 @@ func TestQuoteSuggestions(t *testing.T) {
if err == nil {
t.Fatalf("error returned from query.ParseRegexp(%q) is nil", raw)
}
alert := AlertForQuery(raw, err)
alert := AlertForQuery(err)
if !strings.Contains(alert.Description, "regexp") {
t.Errorf("description is '%s', want it to contain 'regexp'", alert.Description)
}

View File

@ -243,8 +243,6 @@ func SearchTypeFromString(patternType string) (query.SearchType, error) {
return query.SearchTypeRegex, nil
case "structural":
return query.SearchTypeStructural, nil
case "lucky":
return query.SearchTypeLucky, nil
case "codycontext":
return query.SearchTypeCodyContext, nil
case "keyword":
@ -295,8 +293,6 @@ func overrideSearchType(input string, searchType query.SearchType) query.SearchT
searchType = query.SearchTypeLiteral
case "structural":
searchType = query.SearchTypeStructural
case "lucky":
searchType = query.SearchTypeLucky
case "codycontext":
searchType = query.SearchTypeCodyContext
case "keyword":

View File

@ -47,7 +47,6 @@ go_library(
"//internal/search/result",
"//internal/search/searchcontexts",
"//internal/search/searcher",
"//internal/search/smartsearch",
"//internal/search/streaming",
"//internal/search/structural",
"//internal/search/zoekt",

View File

@ -10,6 +10,7 @@ import (
"github.com/sourcegraph/sourcegraph/internal/search"
searchalert "github.com/sourcegraph/sourcegraph/internal/search/alert"
"github.com/sourcegraph/sourcegraph/internal/search/job"
"github.com/sourcegraph/sourcegraph/internal/search/query"
"github.com/sourcegraph/sourcegraph/internal/search/streaming"
"github.com/sourcegraph/sourcegraph/lib/errors"
)
@ -69,6 +70,12 @@ func (j *alertJob) Run(ctx context.Context, clients job.RuntimeClients, stream s
}
}
if countingStream.Count() == 0 &&
j.inputs.SearchMode == search.SmartSearch &&
(j.inputs.PatternType == query.SearchTypeLiteral || j.inputs.PatternType == query.SearchTypeStandard) {
return search.AlertForSmartSearch(), nil
}
return search.MaxPriorityAlert(jobAlert, observerAlert), err
}

View File

@ -23,7 +23,6 @@ import (
"github.com/sourcegraph/sourcegraph/internal/search/result"
"github.com/sourcegraph/sourcegraph/internal/search/searchcontexts"
"github.com/sourcegraph/sourcegraph/internal/search/searcher"
"github.com/sourcegraph/sourcegraph/internal/search/smartsearch"
"github.com/sourcegraph/sourcegraph/internal/search/structural"
"github.com/sourcegraph/sourcegraph/internal/search/zoekt"
"github.com/sourcegraph/sourcegraph/internal/searcher/protocol"
@ -48,13 +47,6 @@ func NewPlanJob(inputs *search.Inputs, plan query.Plan) (job.Job, error) {
return NewBasicJob(inputs, b)
}
if inputs.SearchMode == search.SmartSearch || inputs.PatternType == query.SearchTypeLucky {
if inputs.PatternType == query.SearchTypeCodyContext || inputs.PatternType == query.SearchTypeKeyword {
return nil, errors.Newf("The '%s' patterntype is not compatible with Smart Search", inputs.PatternType)
}
jobTree = smartsearch.NewSmartSearchJob(jobTree, newJob, plan)
}
if inputs.PatternType == query.SearchTypeCodyContext {
newJobTree, err := codycontext.NewSearchJob(plan, inputs, newJob)
if err != nil {

View File

@ -311,56 +311,6 @@ func TestNewPlanJob(t *testing.T) {
(type . text))
REPOSCOMPUTEEXCLUDED
NOOP)))))
`),
}, {
query: `repo:sourcegraph/sourcegraph rev:*refs/heads/*`,
protocol: search.Streaming,
searchType: query.SearchTypeLucky,
want: autogold.Expect(`
(LOG
(ALERT
(features . error decoding features)
(protocol . Streaming)
(onSourcegraphDotCom . true)
(query . )
(originalQuery . )
(patternType . lucky)
(FEELINGLUCKYSEARCH
(TIMEOUT
(timeout . 20s)
(LIMIT
(limit . 10000)
(PARALLEL
(REPOSCOMPUTEEXCLUDED
(repoOpts.repoFilters . [sourcegraph/sourcegraph@*refs/heads/*]))
(REPOSEARCH
(repoOpts.repoFilters . [sourcegraph/sourcegraph@*refs/heads/*])
(repoNamePatterns . ["(?i)sourcegraph/sourcegraph"]))))))))
`),
}, {
query: `repo:sourcegraph/sourcegraph@*refs/heads/*`,
protocol: search.Streaming,
searchType: query.SearchTypeLucky,
want: autogold.Expect(`
(LOG
(ALERT
(features . error decoding features)
(protocol . Streaming)
(onSourcegraphDotCom . true)
(query . )
(originalQuery . )
(patternType . lucky)
(FEELINGLUCKYSEARCH
(TIMEOUT
(timeout . 20s)
(LIMIT
(limit . 10000)
(PARALLEL
(REPOSCOMPUTEEXCLUDED
(repoOpts.repoFilters . [sourcegraph/sourcegraph@*refs/heads/*]))
(REPOSEARCH
(repoOpts.repoFilters . [sourcegraph/sourcegraph@*refs/heads/*])
(repoNamePatterns . ["(?i)sourcegraph/sourcegraph"]))))))))
`),
}, {
query: `foo @bar`,
@ -1294,62 +1244,6 @@ func TestNewPlanJob(t *testing.T) {
}
}
// TestSmartSearchRestrictions checks that NewPlanJob rejects smart search
// when combined with incompatible pattern types (keyword, codycontext),
// while allowing compatible ones (standard).
func TestSmartSearchRestrictions(t *testing.T) {
cases := []struct {
query string
protocol search.Protocol
searchType query.SearchType
searchMode search.Mode
wantErr error // nil means the combination must be accepted
}{{
query: `foo context:@userA`,
protocol: search.Streaming,
searchType: query.SearchTypeStandard,
searchMode: search.SmartSearch,
wantErr: nil,
},
{
query: `foo context:@userA`,
protocol: search.Streaming,
searchType: query.SearchTypeKeyword,
searchMode: search.SmartSearch,
wantErr: errors.New("The 'keyword' patterntype is not compatible with Smart Search"),
},
{
query: `foo context:@userA`,
protocol: search.Streaming,
searchType: query.SearchTypeCodyContext,
searchMode: search.SmartSearch,
wantErr: errors.New("The 'codycontext' patterntype is not compatible with Smart Search"),
},
}
for _, tc := range cases {
t.Run(tc.query, func(t *testing.T) {
plan, err := query.Pipeline(query.Init(tc.query, tc.searchType))
require.NoError(t, err)
inputs := &search.Inputs{
UserSettings: &schema.Settings{},
PatternType: tc.searchType,
SearchMode: tc.searchMode,
Protocol: tc.protocol,
Features: &search.Features{},
OnSourcegraphDotCom: true,
}
// NOTE(review): comparing freshly-constructed errors with errors.Is
// presumably relies on lib/errors matching by message — confirm this
// holds for the project's errors package (stdlib errors.Is would not).
_, err = NewPlanJob(inputs, plan)
if !errors.Is(err, tc.wantErr) {
if tc.wantErr == nil {
t.Errorf("got unexpected error %v", err)
} else {
t.Errorf("error mismatch: got %v, want %v", err, tc.wantErr)
}
}
})
}
}
func TestToEvaluateJob(t *testing.T) {
test := func(input string, protocol search.Protocol) string {
q, _ := query.ParseLiteral(input)

View File

@ -95,8 +95,6 @@ func (l *LogJob) logEvent(ctx context.Context, clients job.RuntimeClients, durat
types = append(types, "literal")
case l.inputs.PatternType == query.SearchTypeRegex:
types = append(types, "regexp")
case l.inputs.PatternType == query.SearchTypeLucky:
types = append(types, "lucky")
}
}
}

View File

@ -1153,7 +1153,7 @@ func (p *parser) parseAnd() ([]Node, error) {
left, err = p.parseLeaves(Regexp)
case SearchTypeLiteral, SearchTypeStructural:
left, err = p.parseLeaves(Literal)
case SearchTypeStandard, SearchTypeLucky:
case SearchTypeStandard:
left, err = p.parseLeaves(Literal | Standard)
case SearchTypeKeyword:
left, err = p.parseLeaves(Literal | Standard | QuotesAsLiterals)

View File

@ -93,7 +93,7 @@ func SubstituteSearchContexts(lookupQueryString func(contextValue string) (strin
func For(searchType SearchType) step {
var processType step
switch searchType {
case SearchTypeStandard, SearchTypeLucky, SearchTypeCodyContext:
case SearchTypeStandard, SearchTypeCodyContext:
processType = succeeds(substituteConcat(standard))
case SearchTypeLiteral:
processType = succeeds(substituteConcat(space))

View File

@ -1,55 +1,78 @@
[
{
"value": "alsace",
"negated": false,
"labels": [
"Literal",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 0
"and": [
{
"value": "alsace",
"negated": false,
"labels": [
"Regexp"
],
"range": {
"start": {
"line": 0,
"column": 0
},
"end": {
"line": 0,
"column": 8
}
}
},
"end": {
"line": 0,
"column": 6
}
}
},
{
"value": "bourgogne",
"negated": false,
"labels": [
"Regexp"
],
"range": {
"start": {
"line": 0,
"column": 7
{
"value": "bourgogne",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 9
},
"end": {
"line": 0,
"column": 18
}
}
},
"end": {
"line": 0,
"column": 18
}
}
},
{
"value": "bordeaux",
"negated": false,
"labels": [
"Literal",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 19
{
"value": "bordeaux",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 19
},
"end": {
"line": 0,
"column": 27
}
}
},
"end": {
"line": 0,
"column": 27
{
"value": "champagne",
"negated": false,
"labels": [
"Regexp"
],
"range": {
"start": {
"line": 0,
"column": 28
},
"end": {
"line": 0,
"column": 39
}
}
}
}
]
}
]

View File

@ -4,23 +4,6 @@
{
"value": "alsace",
"negated": false,
"labels": [
"Regexp"
],
"range": {
"start": {
"line": 0,
"column": 0
},
"end": {
"line": 0,
"column": 8
}
}
},
{
"value": "bourgogne",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
@ -29,7 +12,24 @@
"range": {
"start": {
"line": 0,
"column": 9
"column": 0
},
"end": {
"line": 0,
"column": 6
}
}
},
{
"value": "bourgogne",
"negated": false,
"labels": [
"Regexp"
],
"range": {
"start": {
"line": 0,
"column": 7
},
"end": {
"line": 0,
@ -55,23 +55,6 @@
"column": 27
}
}
},
{
"value": "champagne",
"negated": false,
"labels": [
"Regexp"
],
"range": {
"start": {
"line": 0,
"column": 28
},
"end": {
"line": 0,
"column": 39
}
}
}
]
}

View File

@ -2,7 +2,7 @@
{
"and": [
{
"value": "alsace",
"value": "a",
"negated": false,
"labels": [
"Literal",
@ -16,29 +16,12 @@
},
"end": {
"line": 0,
"column": 6
"column": 1
}
}
},
{
"value": "bourgogne",
"negated": false,
"labels": [
"Regexp"
],
"range": {
"start": {
"line": 0,
"column": 7
},
"end": {
"line": 0,
"column": 18
}
}
},
{
"value": "bordeaux",
"value": "b",
"negated": false,
"labels": [
"Literal",
@ -48,11 +31,87 @@
"range": {
"start": {
"line": 0,
"column": 19
"column": 2
},
"end": {
"line": 0,
"column": 27
"column": 3
}
}
},
{
"value": "c",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 4
},
"end": {
"line": 0,
"column": 5
}
}
},
{
"value": "d",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 6
},
"end": {
"line": 0,
"column": 7
}
}
},
{
"value": "e",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 8
},
"end": {
"line": 0,
"column": 9
}
}
},
{
"value": "f",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 10
},
"end": {
"line": 0,
"column": 11
}
}
}

View File

@ -12,17 +12,17 @@
"range": {
"start": {
"line": 0,
"column": 0
"column": 1
},
"end": {
"line": 0,
"column": 1
"column": 2
}
}
},
{
"value": "b",
"negated": false,
"negated": true,
"labels": [
"Literal",
"QuotesAsLiterals",
@ -31,17 +31,17 @@
"range": {
"start": {
"line": 0,
"column": 2
"column": 3
},
"end": {
"line": 0,
"column": 3
"column": 8
}
}
},
{
"value": "c",
"negated": false,
"negated": true,
"labels": [
"Literal",
"QuotesAsLiterals",
@ -50,11 +50,11 @@
"range": {
"start": {
"line": 0,
"column": 4
"column": 9
},
"end": {
"line": 0,
"column": 5
"column": 14
}
}
},
@ -69,49 +69,11 @@
"range": {
"start": {
"line": 0,
"column": 6
"column": 15
},
"end": {
"line": 0,
"column": 7
}
}
},
{
"value": "e",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 8
},
"end": {
"line": 0,
"column": 9
}
}
},
{
"value": "f",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 10
},
"end": {
"line": 0,
"column": 11
"column": 16
}
}
}

View File

@ -5,25 +5,7 @@
"value": "a",
"negated": false,
"labels": [
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 1
},
"end": {
"line": 0,
"column": 2
}
}
},
{
"value": "b",
"negated": true,
"labels": [
"HeuristicHoisted",
"Literal",
"QuotesAsLiterals",
"Standard"
@ -35,14 +17,15 @@
},
"end": {
"line": 0,
"column": 8
"column": 4
}
}
},
{
"value": "c",
"negated": true,
"value": "b",
"negated": false,
"labels": [
"HeuristicHoisted",
"Literal",
"QuotesAsLiterals",
"Standard"
@ -50,11 +33,31 @@
"range": {
"start": {
"line": 0,
"column": 9
"column": 5
},
"end": {
"line": 0,
"column": 14
"column": 6
}
}
},
{
"value": "c",
"negated": false,
"labels": [
"HeuristicHoisted",
"Literal",
"QuotesAsLiterals",
"Standard"
],
"range": {
"start": {
"line": 0,
"column": 7
},
"end": {
"line": 0,
"column": 8
}
}
},
@ -62,6 +65,7 @@
"value": "d",
"negated": false,
"labels": [
"HeuristicHoisted",
"Literal",
"QuotesAsLiterals",
"Standard"
@ -69,11 +73,11 @@
"range": {
"start": {
"line": 0,
"column": 15
"column": 16
},
"end": {
"line": 0,
"column": 16
"column": 17
}
}
}

View File

@ -227,10 +227,6 @@ func TestConcat(t *testing.T) {
autogold.ExpectFile(t, autogold.Raw(test(`alsace /bourgogne/ bordeaux`, SearchTypeStandard)))
})
t.Run("", func(t *testing.T) {
autogold.ExpectFile(t, autogold.Raw(test(`alsace /bourgogne/ bordeaux`, SearchTypeLucky)))
})
t.Run("", func(t *testing.T) {
autogold.ExpectFile(t, autogold.Raw(test(`/alsace/ bourgogne bordeaux /champagne/`, SearchTypeKeyword)))
})

View File

@ -39,7 +39,6 @@ const (
SearchTypeRegex SearchType = iota
SearchTypeLiteral
SearchTypeStructural
SearchTypeLucky
SearchTypeStandard
SearchTypeCodyContext
SearchTypeKeyword
@ -55,8 +54,6 @@ func (s SearchType) String() string {
return "literal"
case SearchTypeStructural:
return "structural"
case SearchTypeLucky:
return "lucky"
case SearchTypeCodyContext:
return "codycontext"
case SearchTypeKeyword:

View File

@ -1,53 +0,0 @@
load("//dev:go_defs.bzl", "go_test")
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "smartsearch",
srcs = [
"generator.go",
"rules.go",
"smart_search_job.go",
],
importpath = "github.com/sourcegraph/sourcegraph/internal/search/smartsearch",
tags = [TAG_PLATFORM_SEARCH],
visibility = ["//:__subpackages__"],
deps = [
"//internal/search",
"//internal/search/alert",
"//internal/search/job",
"//internal/search/limits",
"//internal/search/query",
"//internal/search/repos",
"//internal/search/streaming",
"//lib/codeintel/languages",
"//lib/errors",
"@com_github_grafana_regexp//:regexp",
"@io_opentelemetry_go_otel//attribute",
"@org_gonum_v1_gonum//stat/combin",
],
)
go_test(
name = "smartsearch_test",
timeout = "short",
srcs = [
"generator_test.go",
"rules_test.go",
"smart_search_job_test.go",
],
data = glob(["testdata/**"]),
embed = [":smartsearch"],
tags = [TAG_PLATFORM_SEARCH],
deps = [
"//internal/search",
"//internal/search/alert",
"//internal/search/job",
"//internal/search/job/mockjob",
"//internal/search/limits",
"//internal/search/query",
"//internal/search/result",
"//internal/search/streaming",
"@com_github_hexops_autogold_v2//:autogold",
"@com_github_stretchr_testify//require",
],
)

View File

@ -1,219 +0,0 @@
package smartsearch
import (
"strings"
"gonum.org/v1/gonum/stat/combin"
"github.com/sourcegraph/sourcegraph/internal/search/query"
)
// next is the continuation for the query generator.
type next func() (*autoQuery, next)
type cg = combin.CombinationGenerator
type PHASE int
const (
ONE PHASE = iota + 1
TWO
THREE
)
// NewGenerator returns a generator for queries produced by a combination
// of rules on a seed query. The generator has a strategy over two kinds of rule
// sets: narrowing and widening rules. You can read more below, but if you don't
// care about this and just want to apply rules sequentially, simply pass in
// only `widen` rules and pass in an empty `narrow` rule set. This will mean
// your queries are just generated by successively applying rules in order of
// the `widen` rule set. To get more sophisticated generation behavior, read on.
//
// This generator understands two kinds of rules:
//
// - narrowing rules (roughly, rules that we expect make a query more specific, and reduces the result set size)
// - widening rules (roughly, rules that we expect make a query more general, and increases the result set size).
//
// A concrete example of a narrowing rule might be: `go parse` -> `lang:go
// parse`. This is because we restrict the subset of files to search for `parse` to
// Go files only.
//
// A concrete example of a widening rule might be: `a b` -> `a OR b`. This is because
// the `OR` expression is more general and will typically find more results than
// the string `a b`.
//
// The way the generator applies narrowing and widening rules has three phases,
// executed in order. The phases work like this:
//
// PHASE ONE: The generator strategy tries to first apply _all narrowing_ rules,
// and then successively reduces the number of rules that it attempts to apply
// by one. This strategy is useful when we try the most aggressive
// interpretation of a query subject to rules first, and gradually loosen the
// number of rules and interpretation. Roughly, PHASE ONE can be thought of as
// trying to maximize applying "for all" rules on the narrow rule set.
//
// PHASE TWO: The generator performs PHASE ONE generation, generating
// combinations of narrow rules, and then additionally _adds_ the first widening
// rule to each narrowing combination. It continues iterating along the list of
// widening rules, appending them to each narrowing combination until the
// iteration of widening rules is exhausted. Roughly, PHASE TWO can be thought
// of as trying to maximize applying "for all" rules in the narrow rule set
// while widening them by applying, in order, "there exists" rules in the widen
// rule set.
//
// PHASE THREE: The generator only applies widening rules in order without any
// narrowing rules. Roughly, PHASE THREE can be thought of as an ordered "there
// exists" application over widen rules.
//
// To avoid spending time generating invalid combinations, the generator
// prunes the initial rule set to only those rules that do successfully apply
// individually to the seed query.
func NewGenerator(seed query.Basic, narrow, widen []rule) next {
narrow = pruneRules(seed, narrow)
widen = pruneRules(seed, widen)
num := len(narrow)
// the iterator state `n` stores:
// - phase, the current generation phase based on progress
// - k, the size of the selection in the narrow set to apply
// - cg, an iterator producing the next sequence of rules for the current value of `k`.
// - w, the index of the widen rule to apply (-1 if empty)
var n func(phase PHASE, k int, c *cg, w int) next
n = func(phase PHASE, k int, c *cg, w int) next {
var transform []transform
var descriptions []string
var generated *query.Basic
narrowing_exhausted := k == 0
widening_active := w != -1
widening_exhausted := widening_active && w == len(widen)
switch phase {
case THREE:
if widening_exhausted {
// Base case: we exhausted the set of narrow
// rules (if any) and we've attempted every
// widen rule with the sets of narrow rules.
return nil
}
transform = append(transform, widen[w].transform...)
descriptions = append(descriptions, widen[w].description)
w += 1 // advance to next widening rule.
case TWO:
if widening_exhausted {
// Start phase THREE: apply only widening rules.
return n(THREE, 0, nil, 0)
}
if narrowing_exhausted && !widening_exhausted {
// Continue widening: We've exhausted the sets of narrow
// rules for the current widen rule, but we're not done
// yet: there are still more widen rules to try. So
// increment w by 1.
c = combin.NewCombinationGenerator(num, num)
w += 1 // advance to next widening rule.
return n(phase, num, c, w)
}
if !c.Next() {
// Reduce narrow set size.
k -= 1
c = combin.NewCombinationGenerator(num, k)
return n(phase, k, c, w)
}
for _, idx := range c.Combination(nil) {
transform = append(transform, narrow[idx].transform...)
descriptions = append(descriptions, narrow[idx].description)
}
// Compose narrow rules with a widen rule.
transform = append(transform, widen[w].transform...)
descriptions = append(descriptions, widen[w].description)
case ONE:
if narrowing_exhausted && !widening_active {
// Start phase TWO: apply widening with
// narrowing rules. We've exhausted the sets of
// narrow rules, but have not attempted to
// compose them with any widen rules. Compose
// them with widen rules by initializing w to 0.
cg := combin.NewCombinationGenerator(num, num)
return n(TWO, num, cg, 0)
}
if !c.Next() {
// Reduce narrow set size.
k -= 1
c = combin.NewCombinationGenerator(num, k)
return n(phase, k, c, w)
}
for _, idx := range c.Combination(nil) {
transform = append(transform, narrow[idx].transform...)
descriptions = append(descriptions, narrow[idx].description)
}
}
generated = applyTransformation(seed, transform)
if generated == nil {
// Rule does not apply, go to next rule.
return n(phase, k, c, w)
} else if err := query.ValidatePlan([]query.Basic{*generated}); err != nil {
// Generated query is not valid, go to next rule.
return n(phase, k, c, w)
}
q := autoQuery{
description: strings.Join(descriptions, " ⚬ "),
query: *generated,
}
return func() (*autoQuery, next) {
return &q, n(phase, k, c, w)
}
}
if len(narrow) == 0 {
// No narrow rules pruned through: skip straight to widening-only.
return n(THREE, 0, nil, 0)
}
cg := combin.NewCombinationGenerator(num, num)
return n(ONE, num, cg, -1)
}
// pruneRules returns the subset of rules that can each be applied
// individually to the seed query without failing.
func pruneRules(seed query.Basic, rules []rule) []rule {
// Skip rule generation entirely for diff searches: running additional
// diff searches is expensive, so we clamp this until things improve.
includedTypes, _ := seed.IncludeExcludeValues(query.FieldType)
for _, typ := range includedTypes {
if typ == "diff" {
return []rule{}
}
}
kept := make([]rule, 0, len(rules))
for _, candidate := range rules {
if applyTransformation(seed, candidate.transform) != nil {
kept = append(kept, candidate)
}
}
return kept
}
// applyTransformation threads `b` through every function in `transform`,
// in order, and returns the final query. It returns nil as soon as any
// transform in the chain does not apply.
func applyTransformation(b query.Basic, transform []transform) *query.Basic {
current := b
for _, step := range transform {
next := step(current)
if next == nil {
return nil
}
current = *next
}
return &current
}

View File

@ -1,87 +0,0 @@
package smartsearch
import (
"encoding/json"
"testing"
"github.com/hexops/autogold/v2"
"github.com/sourcegraph/sourcegraph/internal/search/query"
)
// want captures one generated query for golden-file comparison: the
// applied rule's description, the original input, and the generated
// query rendered as a string.
type want struct {
Description string
Input string
Query string
}
// TestNewGenerator compares generated queries against golden files for
// narrow+widen, narrow-only, and widen-only rule set combinations.
func TestNewGenerator(t *testing.T) {
test := func(input string, rulesNarrow, rulesWiden []rule) string {
q, _ := query.ParseStandard(input)
b, _ := query.ToBasicQuery(q)
g := NewGenerator(b, rulesNarrow, rulesWiden)
result, _ := json.MarshalIndent(generateAll(g, input), "", " ")
return string(result)
}
// Each case pairs a narrow rule set with a widen rule set.
cases := [][2][]rule{
{rulesNarrow, rulesWiden},
{rulesNarrow, nil},
{nil, rulesWiden},
}
for _, c := range cases {
t.Run("rule application", func(t *testing.T) {
autogold.ExpectFile(t, autogold.Raw(test(`go commit yikes derp`, c[0], c[1])))
})
}
}
// TestSkippedRules verifies that no queries are generated for `type:diff`
// searches, which pruneRules clamps to an empty rule set.
func TestSkippedRules(t *testing.T) {
test := func(input string) string {
q, _ := query.ParseStandard(input)
b, _ := query.ToBasicQuery(q)
g := NewGenerator(b, rulesNarrow, rulesWiden)
result, _ := json.MarshalIndent(generateAll(g, input), "", " ")
return string(result)
}
c := `type:diff foo bar`
t.Run("do not apply rules for type_diff", func(t *testing.T) {
autogold.ExpectFile(t, autogold.Raw(test(c)))
})
}
// TestSkipInvalidQueries verifies that the generator drops candidate
// queries that fail validation rather than emitting them.
func TestSkipInvalidQueries(t *testing.T) {
test := func(input string) []want {
q, _ := query.ParseStandard(input)
b, _ := query.ToBasicQuery(q)
g := NewGenerator(b, rulesNarrow, rulesWiden)
return generateAll(g, input)
}
// The "expand URLs to filters" rule can produce a repo filter with
// an invalid regex, like `repo:github.com/org/repo(`
c := `github.com/org/repo(/tree/rev)`
got := test(c)
if len(got) != 0 {
t.Errorf("expected no queries to be generated")
}
}
// generateAll drains the generator and records every produced query,
// pairing each with the original input for golden-file output.
func generateAll(g next, input string) []want {
collected := []want{}
for cont := g; cont != nil; {
var produced *autoQuery
produced, cont = cont()
collected = append(collected, want{
Description: produced.description,
Input: input,
Query: query.StringHuman(produced.query.ToParseTree()),
})
}
return collected
}

View File

@ -1,708 +0,0 @@
package smartsearch
import (
"fmt"
"net/url"
"regexp/syntax" //nolint:depguard // using the grafana fork of regexp clashes with zoekt, which uses the std regexp/syntax.
"strings"
"github.com/grafana/regexp"
"github.com/sourcegraph/sourcegraph/internal/search/query"
"github.com/sourcegraph/sourcegraph/lib/codeintel/languages"
)
// rule represents a transformation function on a Basic query. Transformation
// cannot fail: either they apply in sequence and produce a valid, non-nil,
// Basic query, or they do not apply, in which case they return nil. See the
// `unquotePatterns` rule for an example.
type rule struct {
description string
transform []transform
}
type transform func(query.Basic) *query.Basic
// rulesNarrow are rules that narrow a query's interpretation (e.g., turning a
// pattern into a filter). Slice order is the order in which the generator
// tries and composes these rules.
var rulesNarrow = []rule{
	{
		description: "unquote patterns",
		transform:   []transform{unquotePatterns},
	},
	{
		description: "apply search type for pattern",
		transform:   []transform{typePatterns},
	},
	{
		description: "apply language filter for pattern",
		transform:   []transform{langPatterns},
	},
	{
		description: "apply symbol select for pattern",
		transform:   []transform{symbolPatterns},
	},
	{
		description: "expand URL to filters",
		transform:   []transform{patternsToCodeHostFilters},
	},
	{
		description: "rewrite repo URLs",
		transform:   []transform{rewriteRepoFilter},
	},
}
// rulesWiden are rules that widen a query's interpretation (they can match
// more results than the original query).
var rulesWiden = []rule{
	{
		description: "patterns as regular expressions",
		transform:   []transform{regexpPatterns},
	},
	{
		description: "AND patterns together",
		transform:   []transform{unorderedPatterns},
	},
}
// unquotePatterns is a rule that unquotes all patterns in the input query (it
// removes quotes, and honors escape sequences inside quoted values).
func unquotePatterns(b query.Basic) *query.Basic {
	// Go back all the way to the raw tree representation: parsing the string
	// in regex mode annotates quoted patterns with the Quoted label, which is
	// what we look for below.
	raw, err := query.Parse(query.StringHuman(b.ToParseTree()), query.SearchTypeRegex)
	if err != nil {
		return nil
	}

	// Track whether any pattern was actually unquoted; if none was, this rule
	// does not apply and we skip running the search.
	unquoted := false
	mapped := query.MapPattern(raw, func(value string, negated bool, annotation query.Annotation) query.Node {
		if annotation.Labels.IsSet(query.Quoted) && !annotation.Labels.IsSet(query.IsContent) {
			unquoted = true
			annotation.Labels.Unset(query.Quoted)
			annotation.Labels.Set(query.Literal)
		}
		return query.Pattern{
			Value:      value,
			Negated:    negated,
			Annotation: annotation,
		}
	})
	if !unquoted {
		return nil
	}

	seq, err := query.Sequence(query.For(query.SearchTypeStandard))(mapped)
	if err != nil {
		return nil
	}
	basic, err := query.ToBasicQuery(seq)
	if err != nil {
		return nil
	}
	return &basic
}
// regexpPatterns converts literal patterns into regular expression patterns.
// The conversion is a heuristic and happens based on whether the pattern has
// indicative regular expression metasyntax. It would be overly aggressive to
// convert patterns containing _any_ potential metasyntax, since a pattern like
// my.config.yaml contains two `.` (match any character in regexp).
func regexpPatterns(b query.Basic) *query.Basic {
	rawParseTree, err := query.Parse(query.StringHuman(b.ToParseTree()), query.SearchTypeStandard)
	if err != nil {
		return nil
	}
	// we decide to interpret patterns as regular expressions if the number of
	// significant metasyntax operators exceed this threshold
	METASYNTAX_THRESHOLD := 2
	// countMetaSyntax counts the number of significant regular expression
	// operators in string when it is interpreted as a regular expression. A
	// rough map of operators to syntax can be found here:
	// https://sourcegraph.com/github.com/golang/go@bf5898ef53d1693aa572da0da746c05e9a6f15c5/-/blob/src/regexp/syntax/regexp.go?L116-244
	var countMetaSyntax func([]*syntax.Regexp) int
	countMetaSyntax = func(res []*syntax.Regexp) int {
		count := 0
		for _, r := range res {
			switch r.Op {
			case
				// operators that are weighted 0 on their own
				syntax.OpAnyCharNotNL,
				syntax.OpAnyChar,
				syntax.OpNoMatch,
				syntax.OpEmptyMatch,
				syntax.OpLiteral,
				syntax.OpConcat:
				count += countMetaSyntax(r.Sub)

			case
				// operators that are weighted 1 on their own
				syntax.OpCharClass,
				syntax.OpBeginLine,
				syntax.OpEndLine,
				syntax.OpBeginText,
				syntax.OpEndText,
				syntax.OpWordBoundary,
				syntax.OpNoWordBoundary,
				syntax.OpAlternate:
				count += countMetaSyntax(r.Sub) + 1

			case
				// quantifiers *, +, ?, {...} on metasyntax like
				// `.` or `(...)` are weighted 2. If the
				// quantifier applies to other syntax like
				// literals (not metasyntax) it's weighted 1.
				syntax.OpStar,
				syntax.OpPlus,
				syntax.OpQuest,
				syntax.OpRepeat:
				switch r.Sub[0].Op {
				case
					syntax.OpAnyChar,
					syntax.OpAnyCharNotNL,
					syntax.OpCapture:
					count += countMetaSyntax(r.Sub) + 2
				default:
					count += countMetaSyntax(r.Sub) + 1
				}

			case
				// capture groups over an alternate like (a|b)
				// are weighted one. All other capture groups
				// are weighted zero on their own because parens
				// are very common in code.
				syntax.OpCapture:
				switch r.Sub[0].Op {
				case syntax.OpAlternate:
					count += countMetaSyntax(r.Sub) + 1
				default:
					count += countMetaSyntax(r.Sub)
				}
			}
		}
		return count
	}

	changed := false // whether any pattern crossed the threshold and was relabeled
	newParseTree := query.MapPattern(rawParseTree, func(value string, negated bool, annotation query.Annotation) query.Node {
		// Already a regexp pattern: nothing to convert.
		if annotation.Labels.IsSet(query.Regexp) {
			return query.Pattern{
				Value:      value,
				Negated:    negated,
				Annotation: annotation,
			}
		}
		// If the pattern does not even parse as a regexp, keep it literal.
		re, err := syntax.Parse(value, syntax.ClassNL|syntax.PerlX|syntax.UnicodeGroups)
		if err != nil {
			return query.Pattern{
				Value:      value,
				Negated:    negated,
				Annotation: annotation,
			}
		}
		count := countMetaSyntax([]*syntax.Regexp{re})
		if count < METASYNTAX_THRESHOLD {
			return query.Pattern{
				Value:      value,
				Negated:    negated,
				Annotation: annotation,
			}
		}
		// Enough metasyntax: relabel this pattern as a regexp pattern.
		changed = true
		annotation.Labels.Unset(query.Literal)
		annotation.Labels.Set(query.Regexp)
		return query.Pattern{
			Value:      value,
			Negated:    negated,
			Annotation: annotation,
		}
	})
	if !changed {
		// No pattern was converted, so the rule does not apply.
		return nil
	}
	newNodes, err := query.Sequence(query.For(query.SearchTypeStandard))(newParseTree)
	if err != nil {
		return nil
	}
	newBasic, err := query.ToBasicQuery(newNodes)
	if err != nil {
		return nil
	}
	return &newBasic
}
// unorderedPatterns generates a query that interprets all recognized patterns
// as unordered terms (`and`-ed terms). The implementation detail is that we
// simply map all `concat` nodes (after a raw parse) to `and` nodes. This works
// because parsing maintains the invariant that `concat` nodes only ever have
// pattern children.
func unorderedPatterns(b query.Basic) *query.Basic {
	tree, err := query.Parse(query.StringHuman(b.ToParseTree()), query.SearchTypeStandard)
	if err != nil {
		return nil
	}
	mapped, applied := mapConcat(tree)
	if !applied {
		// No concat node existed, so there is nothing to AND together.
		return nil
	}
	seq, err := query.Sequence(query.For(query.SearchTypeStandard))(mapped)
	if err != nil {
		return nil
	}
	basic, err := query.ToBasicQuery(seq)
	if err != nil {
		return nil
	}
	return &basic
}
// mapConcat rewrites every `concat` operator in the tree to an `and` operator
// and reports whether any rewrite happened.
func mapConcat(nodes []query.Node) ([]query.Node, bool) {
	out := make([]query.Node, 0, len(nodes))
	rewrote := false
	for _, node := range nodes {
		op, isOp := node.(query.Operator)
		if !isOp {
			out = append(out, node)
			continue
		}
		if op.Kind == query.Concat {
			// No need to recurse: `concat` nodes only have patterns.
			out = append(out, query.Operator{
				Kind:     query.And,
				Operands: op.Operands,
			})
			rewrote = true
			continue
		}
		// Recurse into non-concat operators.
		sub, subRewrote := mapConcat(op.Operands)
		out = append(out, query.Operator{
			Kind:     op.Kind,
			Operands: sub,
		})
		rewrote = rewrote || subRewrote
	}
	return out, rewrote
}
// symbolTypes maps pattern terms (including short aliases like "func" and
// "var") to the symbol kind used in a `select:symbol.<kind>` filter. Note
// that "enum" maps to "enum-member".
var symbolTypes = map[string]string{
	"function":       "function",
	"func":           "function",
	"module":         "module",
	"namespace":      "namespace",
	"package":        "package",
	"class":          "class",
	"method":         "method",
	"property":       "property",
	"field":          "field",
	"constructor":    "constructor",
	"interface":      "interface",
	"variable":       "variable",
	"var":            "variable",
	"constant":       "constant",
	"const":          "constant",
	"string":         "string",
	"number":         "number",
	"boolean":        "boolean",
	"bool":           "boolean",
	"array":          "array",
	"object":         "object",
	"key":            "key",
	"enum":           "enum-member",
	"struct":         "struct",
	"type-parameter": "type-parameter",
}
// symbolPatterns removes the first pattern that names a recognized symbol
// kind (see symbolTypes) and lifts it into `select:symbol.<kind>` and
// `type:symbol` filters. Returns nil if no pattern matches a symbol kind.
func symbolPatterns(b query.Basic) *query.Basic {
	tree, err := query.Parse(query.StringHuman([]query.Node{b.Pattern}), query.SearchTypeStandard)
	if err != nil {
		return nil
	}
	var (
		found   bool
		kind    string // symbol kind of the first matching pattern
		negated bool   // negation carried over to the select: filter
	)
	remaining := query.MapPattern(tree, func(value string, neg bool, ann query.Annotation) query.Node {
		if !found {
			if alias, ok := symbolTypes[value]; ok {
				found, kind, negated = true, alias, neg
				// Drop this pattern; it becomes the filters below.
				return nil
			}
		}
		return query.Pattern{Value: value, Negated: neg, Annotation: ann}
	})
	if !found {
		return nil
	}

	var pattern query.Node
	if len(remaining) > 0 {
		// Process concat nodes.
		nodes, err := query.Sequence(query.For(query.SearchTypeStandard))(remaining)
		if err != nil {
			return nil
		}
		pattern = nodes[0] // guaranteed root at first node
	}
	return &query.Basic{
		Parameters: append(b.Parameters,
			query.Parameter{
				Field:   query.FieldSelect,
				Value:   fmt.Sprintf("symbol.%s", kind),
				Negated: negated,
			},
			query.Parameter{
				Field: query.FieldType,
				Value: "symbol",
			},
		),
		Pattern: pattern,
	}
}
// repoFilterReplacement pairs a URL-matching regexp with a replacement
// template for rewriting `repo:` filter values.
type repoFilterReplacement struct {
	match   *regexp.Regexp // pattern matched against the repo: filter value
	replace string         // replacement template ($1, $2 refer to match groups)
}

// repoFilterReplacements rewrites repo: filters that contain full GitHub URLs
// into anchored repo regexes like `^github.com/org/repo$`.
var repoFilterReplacements = []repoFilterReplacement{
	{
		match:   regexp.MustCompile(`^(?:https?:\/\/)github\.com\/([^\/]+)\/([^\/\?#]+)(?:.+)?$`),
		replace: "^github.com/$1/$2$",
	},
}
// rewriteRepoFilter rewrites `repo:` filter values that match one of
// repoFilterReplacements (e.g., GitHub URLs) into anchored repo regexes.
// Returns nil if no repo: filter was rewritten.
func rewriteRepoFilter(b query.Basic) *query.Basic {
	rewrote := false
	params := make([]query.Parameter, 0, len(b.Parameters))
	for _, p := range b.Parameters {
		next := p
		if p.Field == "repo" {
			// Apply the first matching replacement, if any.
			for _, r := range repoFilterReplacements {
				if r.match.MatchString(p.Value) {
					next.Value = r.match.ReplaceAllString(p.Value, r.replace)
					rewrote = true
					break
				}
			}
		}
		params = append(params, next)
	}
	if !rewrote {
		return nil
	}
	rewritten := b.MapParameters(params)
	return &rewritten
}
// langPatterns removes the first pattern that names a recognized language (or
// alias) and lifts it into a `lang:` filter. Returns nil if no pattern
// matches a language.
func langPatterns(b query.Basic) *query.Basic {
	tree, err := query.Parse(query.StringHuman([]query.Node{b.Pattern}), query.SearchTypeStandard)
	if err != nil {
		return nil
	}
	var (
		found   bool
		lang    string // canonical language name for the first matching pattern
		negated bool
	)
	remaining := query.MapPattern(tree, func(value string, neg bool, ann query.Annotation) query.Node {
		if !found {
			if alias, ok := languages.GetLanguageByNameOrAlias(value); ok {
				found, lang, negated = true, alias, neg
				// Drop this pattern; it becomes the lang: filter below.
				return nil
			}
		}
		return query.Pattern{Value: value, Negated: neg, Annotation: ann}
	})
	if !found {
		return nil
	}

	var pattern query.Node
	if len(remaining) > 0 {
		// Process concat nodes.
		nodes, err := query.Sequence(query.For(query.SearchTypeStandard))(remaining)
		if err != nil {
			return nil
		}
		pattern = nodes[0] // guaranteed root at first node
	}
	return &query.Basic{
		Parameters: append(b.Parameters, query.Parameter{
			Field:   query.FieldLang,
			Value:   lang,
			Negated: negated,
		}),
		Pattern: pattern,
	}
}
// typePatterns removes the first pattern that names a recognized search type
// (symbol, commit, diff, path) and lifts it into a `type:` filter. Returns
// nil if no pattern matches a type.
func typePatterns(b query.Basic) *query.Basic {
	tree, err := query.Parse(query.StringHuman([]query.Node{b.Pattern}), query.SearchTypeStandard)
	if err != nil {
		return nil
	}
	var (
		found bool
		typ   string // the matched pattern value, preserved in its original case
	)
	remaining := query.MapPattern(tree, func(value string, negated bool, ann query.Annotation) query.Node {
		if !found {
			switch strings.ToLower(value) {
			case "symbol", "commit", "diff", "path":
				found, typ = true, value
				// Drop this pattern; it becomes the type: filter below.
				return nil
			}
		}
		return query.Pattern{Value: value, Negated: negated, Annotation: ann}
	})
	if !found {
		return nil
	}

	var pattern query.Node
	if len(remaining) > 0 {
		// Process concat nodes.
		nodes, err := query.Sequence(query.For(query.SearchTypeStandard))(remaining)
		if err != nil {
			return nil
		}
		pattern = nodes[0] // guaranteed root at first node
	}
	return &query.Basic{
		Parameters: append(b.Parameters, query.Parameter{
			Field: query.FieldType,
			Value: typ,
		}),
		Pattern: pattern,
	}
}
// lookup is the set of code host domains recognized by
// patternToCodeHostFilters; URLs on other domains are left untouched.
var lookup = map[string]struct{}{
	"github.com": {},
	"gitlab.com": {},
}
// patternToCodeHostFilters checks if a pattern contains a code host URL and
// extracts the org/repo/branch and path and lifts these to filters, as
// applicable. It returns nil when the pattern is not a recognized code host
// URL.
func patternToCodeHostFilters(v string, negated bool) *[]query.Node {
	if !strings.HasPrefix(v, "https://") {
		// normalize v with https:// prefix so url.Parse populates Host.
		v = "https://" + v
	}
	u, err := url.Parse(v)
	if err != nil {
		return nil
	}
	// Only lift filters for recognized code host domains.
	domain := strings.TrimPrefix(u.Host, "www.")
	if _, ok := lookup[domain]; !ok {
		return nil
	}
	var value string
	path := strings.Trim(u.Path, "/")
	pathElems := strings.Split(path, "/")
	if len(pathElems) == 0 {
		// NOTE(review): strings.Split never returns an empty slice (an empty
		// path yields [""], length 1), so this branch looks unreachable —
		// confirm before relying on it.
		// Bare domain, e.g. github.com -> repo:^github\.com
		value = regexp.QuoteMeta(domain)
		value = fmt.Sprintf("^%s", value)
		return &[]query.Node{
			query.Parameter{
				Field:      query.FieldRepo,
				Value:      value,
				Negated:    negated,
				Annotation: query.Annotation{},
			}}
	} else if len(pathElems) == 1 {
		// Org prefix, e.g. github.com/org -> repo:^github\.com/org (unanchored suffix)
		value = regexp.QuoteMeta(domain)
		value = fmt.Sprintf("^%s/%s", value, strings.Join(pathElems, "/"))
		return &[]query.Node{
			query.Parameter{
				Field:      query.FieldRepo,
				Value:      value,
				Negated:    negated,
				Annotation: query.Annotation{},
			}}
	} else if len(pathElems) == 2 {
		// Exact repo, e.g. github.com/org/repo -> repo:^github\.com/org/repo$
		value = regexp.QuoteMeta(domain)
		value = fmt.Sprintf("^%s/%s$", value, strings.Join(pathElems, "/"))
		return &[]query.Node{
			query.Parameter{
				Field:      query.FieldRepo,
				Value:      value,
				Negated:    negated,
				Annotation: query.Annotation{},
			}}
	} else if len(pathElems) == 4 && (pathElems[2] == "tree" || pathElems[2] == "commit") {
		// Repo at a revision, e.g. github.com/org/repo/tree/rev -> repo: + rev: filters.
		repoValue := regexp.QuoteMeta(domain)
		repoValue = fmt.Sprintf("^%s/%s$", repoValue, strings.Join(pathElems[:2], "/"))
		revision := pathElems[3]
		return &[]query.Node{
			query.Parameter{
				Field:      query.FieldRepo,
				Value:      repoValue,
				Negated:    negated,
				Annotation: query.Annotation{},
			},
			query.Parameter{
				Field:      query.FieldRev,
				Value:      revision,
				Negated:    negated,
				Annotation: query.Annotation{},
			},
		}
	} else if len(pathElems) >= 5 {
		// File or directory at a revision: repo: + rev: + file: filters.
		repoValue := regexp.QuoteMeta(domain)
		repoValue = fmt.Sprintf("^%s/%s$", repoValue, strings.Join(pathElems[:2], "/"))
		revision := pathElems[3]
		pathValue := strings.Join(pathElems[4:], "/")
		pathValue = regexp.QuoteMeta(pathValue)
		if pathElems[2] == "blob" {
			// blob URLs point at a single file: anchor the path exactly.
			pathValue = fmt.Sprintf("^%s$", pathValue)
		} else if pathElems[2] == "tree" {
			// tree URLs point at a directory: anchor the prefix only.
			pathValue = fmt.Sprintf("^%s", pathValue)
		} else {
			// We don't know what this is.
			return nil
		}
		return &[]query.Node{
			query.Parameter{
				Field:      query.FieldRepo,
				Value:      repoValue,
				Negated:    negated,
				Annotation: query.Annotation{},
			},
			query.Parameter{
				Field:      query.FieldRev,
				Value:      revision,
				Negated:    negated,
				Annotation: query.Annotation{},
			},
			query.Parameter{
				Field:      query.FieldFile,
				Value:      pathValue,
				Negated:    negated,
				Annotation: query.Annotation{},
			},
		}
	}
	// Three path elements (e.g. .../tree with no revision) is not recognized.
	return nil
}
// patternsToCodeHostFilters converts patterns to `repo` or `path` filters if
// they can be interpreted as URIs. Returns nil when no pattern looks like a
// code host URL.
func patternsToCodeHostFilters(b query.Basic) *query.Basic {
	tree, err := query.Parse(query.StringHuman([]query.Node{b.Pattern}), query.SearchTypeStandard)
	if err != nil {
		return nil
	}

	// Patterns recognized as code host URLs are deleted from the tree and
	// their lifted filters collected here; they are re-attached below. We
	// can't map them in place because that might create parameters inside
	// concat nodes.
	var lifted []query.Node
	applied := false
	mapped := query.MapPattern(tree, func(value string, negated bool, ann query.Annotation) query.Node {
		params := patternToCodeHostFilters(value, negated)
		if params == nil {
			return query.Pattern{Value: value, Negated: negated, Annotation: ann}
		}
		applied = true
		lifted = append(lifted, *params...)
		return nil
	})
	if !applied {
		return nil
	}

	// Reduce with NewOperator to obtain a valid partitioning.
	combined := query.NewOperator(append(mapped, lifted...), query.And)
	seq, err := query.Sequence(query.For(query.SearchTypeStandard))(combined)
	if err != nil {
		return nil
	}
	basic, err := query.ToBasicQuery(seq)
	if err != nil {
		return nil
	}
	return &basic
}

View File

@ -1,191 +0,0 @@
package smartsearch
import (
"encoding/json"
"testing"
"github.com/hexops/autogold/v2"
"github.com/sourcegraph/sourcegraph/internal/search/query"
)
// apply runs the given transforms against the parsed input and returns a JSON
// record of the input and resulting query (or "DOES NOT APPLY").
func apply(input string, transforms []transform) string {
	q, _ := query.ParseStandard(input)
	b, _ := query.ToBasicQuery(q)

	queryStr := "DOES NOT APPLY"
	if out := applyTransformation(b, transforms); out != nil {
		queryStr = query.StringHuman(out.ToParseTree())
	}

	j, _ := json.MarshalIndent(struct {
		Input string
		Query string
	}{
		Input: input,
		Query: queryStr,
	}, "", " ")
	return string(j)
}
// Test_unquotePatterns checks the unquotePatterns rule against golden files.
func Test_unquotePatterns(t *testing.T) {
	for _, input := range []string{
		`"monitor"`,
		`repo:^github\.com/sourcegraph/sourcegraph$ "monitor" "*Monitor"`,
		`content:"not quoted"`,
	} {
		t.Run("unquote patterns", func(t *testing.T) {
			autogold.ExpectFile(t, autogold.Raw(apply(input, []transform{unquotePatterns})))
		})
	}
}
// Test_unorderedPatterns checks the unorderedPatterns rule against golden files.
func Test_unorderedPatterns(t *testing.T) {
	for _, input := range []string{
		`context:global parse func`,
	} {
		t.Run("AND patterns", func(t *testing.T) {
			autogold.ExpectFile(t, autogold.Raw(apply(input, []transform{unorderedPatterns})))
		})
	}
}
// Test_langPatterns checks the langPatterns rule against golden files.
func Test_langPatterns(t *testing.T) {
	for _, input := range []string{
		`context:global python`,
		`context:global parse python`,
	} {
		t.Run("lang patterns", func(t *testing.T) {
			autogold.ExpectFile(t, autogold.Raw(apply(input, []transform{langPatterns})))
		})
	}
}
// Test_symbolPatterns checks the symbolPatterns rule against golden files.
func Test_symbolPatterns(t *testing.T) {
	for _, input := range []string{
		`context:global function`,
		`context:global parse function`,
	} {
		t.Run("symbol patterns", func(t *testing.T) {
			autogold.ExpectFile(t, autogold.Raw(apply(input, []transform{symbolPatterns})))
		})
	}
}
// Test_typePatterns checks the typePatterns rule against golden files.
func Test_typePatterns(t *testing.T) {
	for _, input := range []string{
		`context:global fix commit`,
		`context:global code monitor commit`,
		`context:global code or monitor commit`,
	} {
		t.Run("type patterns", func(t *testing.T) {
			autogold.ExpectFile(t, autogold.Raw(apply(input, []transform{typePatterns})))
		})
	}
}
// Test_regexpPatterns checks the regexpPatterns heuristic against golden files,
// covering patterns that should and should not be promoted to regexes.
func Test_regexpPatterns(t *testing.T) {
	for _, input := range []string{
		`[a-z]+`,
		`(ab)*`,
		`c++`,
		`my.yaml.conf`,
		`(using|struct)`,
		`test.get(id)`,
	} {
		t.Run("regexp patterns", func(t *testing.T) {
			autogold.ExpectFile(t, autogold.Raw(apply(input, []transform{regexpPatterns})))
		})
	}
}
// Test_patternsToCodeHostFilters checks URL-to-filter expansion against golden files.
func Test_patternsToCodeHostFilters(t *testing.T) {
	for _, input := range []string{
		`https://github.com/sourcegraph/sourcegraph`,
		`https://github.com/sourcegraph`,
		`github.com/sourcegraph`,
		`https://github.com/sourcegraph/sourcegraph/blob/main/lib/README.md#L50`,
		`https://github.com/sourcegraph/sourcegraph/tree/main/lib`,
		`https://github.com/sourcegraph/sourcegraph/tree/2.12`,
		`https://github.com/sourcegraph/sourcegraph/commit/abc`,
	} {
		t.Run("URL patterns", func(t *testing.T) {
			autogold.ExpectFile(t, autogold.Raw(apply(input, []transform{patternsToCodeHostFilters})))
		})
	}
}
// Test_rewriteRepoFilter checks repo: URL rewriting against golden files.
func Test_rewriteRepoFilter(t *testing.T) {
	for _, input := range []string{
		`repo:https://github.com/sourcegraph/sourcegraph`,
		`repo:http://github.com/sourcegraph/sourcegraph`,
		`repo:https://github.com/sourcegraph/sourcegraph/blob/main/lib/README.md#L50`,
		`repo:https://github.com/sourcegraph/sourcegraph/tree/main/lib`,
		`repo:https://github.com/sourcegraph/sourcegraph/tree/2.12`,
		`repo:https://github.com/sourcegraph/sourcegraph/commit/abc`,
	} {
		t.Run("rewrite repo filter", func(t *testing.T) {
			autogold.ExpectFile(t, autogold.Raw(apply(input, []transform{rewriteRepoFilter})))
		})
	}
}

View File

@ -1,241 +0,0 @@
package smartsearch
import (
"context"
"fmt"
searchrepos "github.com/sourcegraph/sourcegraph/internal/search/repos"
"go.opentelemetry.io/otel/attribute"
"github.com/sourcegraph/sourcegraph/internal/search"
alertobserver "github.com/sourcegraph/sourcegraph/internal/search/alert"
"github.com/sourcegraph/sourcegraph/internal/search/job"
"github.com/sourcegraph/sourcegraph/internal/search/limits"
"github.com/sourcegraph/sourcegraph/internal/search/query"
"github.com/sourcegraph/sourcegraph/internal/search/streaming"
"github.com/sourcegraph/sourcegraph/lib/errors"
)
// autoQuery is an automatically generated query with associated data (e.g., description).
type autoQuery struct {
	description string      // human-readable rule description shown with the proposed query
	query       query.Basic // the generated query itself
}
// newJob is a function that converts a query to a job, and one which lucky
// search expects in order to function. This function corresponds to
// `jobutil.NewBasicJob` normally (we can't call it directly for circular
// dependencies), and otherwise abstracts job creation for tests.
type newJob func(query.Basic) (job.Job, error)
// NewSmartSearchJob creates generators for opportunistic search queries
// that apply various rules, transforming the original input plan into various
// queries that alter its interpretation (e.g., search literally for quotes or
// not, attempt to search the pattern as a regexp, and so on). There is no
// random choice when applying rules.
func NewSmartSearchJob(initialJob job.Job, newJob newJob, plan query.Plan) *FeelingLuckySearchJob {
	gens := make([]next, 0, len(plan))
	for _, basic := range plan {
		gens = append(gens, NewGenerator(basic, rulesNarrow, rulesWiden))
	}

	// makeGenerated wraps a generated query in a job that emits a
	// proposed-query notification; it returns nil if the query is invalid.
	makeGenerated := func(q *autoQuery) job.Job {
		child, err := newJob(q.query)
		if err != nil {
			return nil
		}
		n := &notifier{autoQuery: q}
		return &generatedSearchJob{
			Child:           child,
			NewNotification: n.New,
		}
	}

	return &FeelingLuckySearchJob{
		initialJob:      initialJob,
		generators:      gens,
		newGeneratedJob: makeGenerated,
	}
}
// FeelingLuckySearchJob represents a lucky search. Note `newGeneratedJob`
// returns a job given an autoQuery. It is a function so that generated queries
// can be composed at runtime (with auto queries that dictate runtime control
// flow) with static inputs (search inputs), while not exposing static inputs.
type FeelingLuckySearchJob struct {
	initialJob      job.Job             // the user's original query, run first
	generators      []next              // one generator per Basic query in the plan
	newGeneratedJob func(*autoQuery) job.Job
}
// Do not run autogenerated queries if RESULT_THRESHOLD results exist on the original query.
const RESULT_THRESHOLD = limits.DefaultMaxSearchResultsStreaming
// Run runs the initial job first. If it produced no results, Run executes the
// generated queries in order, streaming their results and collecting their
// proposed-query notifications (as *alertobserver.ErrLuckyQueries) into the
// returned error. It stops once RESULT_THRESHOLD additional results are sent.
func (f *FeelingLuckySearchJob) Run(ctx context.Context, clients job.RuntimeClients, parentStream streaming.Sender) (alert *search.Alert, err error) {
	_, ctx, parentStream, finish := job.StartSpan(ctx, parentStream, f)
	defer func() { finish(alert, err) }()

	// Count stream results to know whether to run generated queries.
	stream := streaming.NewResultCountingStream(parentStream)

	var maxAlerter search.MaxAlerter
	var errs errors.MultiError

	alert, err = f.initialJob.Run(ctx, clients, stream)
	// "No resolved repos" on the original query must not prevent generated
	// queries from running, so it is ignored here.
	if errForReal := errors.Ignore(err, errors.IsPred(searchrepos.ErrNoResolvedRepos)); errForReal != nil {
		return alert, errForReal
	}
	maxAlerter.Add(alert)

	originalResultSetSize := stream.Count()
	if originalResultSetSize >= RESULT_THRESHOLD {
		return alert, err
	}
	if originalResultSetSize > 0 {
		// TODO(@rvantonder): Only run additional searches if the
		// original query strictly returned NO results. This clamp will
		// be removed to also add additional results pending
		// optimizations: https://github.com/sourcegraph/sourcegraph/issues/43721.
		return alert, err
	}

	// Past the clamp above, originalResultSetSize is always 0, so the alert
	// type is unconditionally LuckyAlertPure. (A previous size check here had
	// an unreachable else branch selecting LuckyAlertAdded; reinstate that
	// distinction when the clamp above is removed.)
	generated := &alertobserver.ErrLuckyQueries{Type: alertobserver.LuckyAlertPure, ProposedQueries: []*search.QueryDescription{}}
	var autoQ *autoQuery
	for _, next := range f.generators {
		for next != nil {
			autoQ, next = next()
			j := f.newGeneratedJob(autoQ)
			if j == nil {
				// Generated an invalid job with this query, just continue.
				continue
			}
			alert, err = j.Run(ctx, clients, stream)
			if stream.Count()-originalResultSetSize >= RESULT_THRESHOLD {
				// We've sent additional results up to the maximum bound. Let's stop here.
				var lErr *alertobserver.ErrLuckyQueries
				if errors.As(err, &lErr) {
					generated.ProposedQueries = append(generated.ProposedQueries, lErr.ProposedQueries...)
				}
				if len(generated.ProposedQueries) > 0 {
					errs = errors.Append(errs, generated)
				}
				return maxAlerter.Alert, errs
			}

			var lErr *alertobserver.ErrLuckyQueries
			if errors.As(err, &lErr) {
				// Collect generated queries; they are attached after this loop finishes.
				generated.ProposedQueries = append(generated.ProposedQueries, lErr.ProposedQueries...)
			} else {
				errs = errors.Append(errs, err)
			}
			maxAlerter.Add(alert)
		}
	}
	if len(generated.ProposedQueries) > 0 {
		errs = errors.Append(errs, generated)
	}
	return maxAlerter.Alert, errs
}
// Name identifies this job in tracing and debug output.
func (f *FeelingLuckySearchJob) Name() string {
	return "FeelingLuckySearchJob"
}

// Attributes returns no extra tracing attributes for this job.
func (f *FeelingLuckySearchJob) Attributes(job.Verbosity) []attribute.KeyValue { return nil }

// Children exposes only the initial job; generated jobs are created lazily at run time.
func (f *FeelingLuckySearchJob) Children() []job.Describer {
	return []job.Describer{f.initialJob}
}

// MapChildren returns a shallow copy of the job with fn applied to the initial child job.
func (f *FeelingLuckySearchJob) MapChildren(fn job.MapFunc) job.Job {
	cp := *f
	cp.initialJob = job.Map(f.initialJob, fn)
	return &cp
}
// generatedSearchJob represents a generated search at run time. Note
// `NewNotification` returns the query notifications (encoded as error) given
// the result count of the job. It is a function so that notifications can be
// composed at runtime (with result counts) with static inputs (query string),
// while not exposing static inputs.
type generatedSearchJob struct {
	Child           job.Job         // the concrete search job for the generated query
	NewNotification func(count int) error
}
// Run executes the child job and, when it produced at least one result,
// returns the proposed-query notification (encoded as an error), appended to
// any error from the child job.
func (g *generatedSearchJob) Run(ctx context.Context, clients job.RuntimeClients, parentStream streaming.Sender) (*search.Alert, error) {
	stream := streaming.NewResultCountingStream(parentStream)
	alert, runErr := g.Child.Run(ctx, clients, stream)

	count := stream.Count()
	if count == 0 {
		// Nothing was found; no notification to report.
		return nil, nil
	}

	notification := g.NewNotification(count)
	if ctx.Err() != nil || runErr != nil {
		return alert, errors.Append(runErr, notification)
	}
	return alert, notification
}
// Name identifies this job in tracing and debug output.
func (g *generatedSearchJob) Name() string {
	return "GeneratedSearchJob"
}

// Children exposes the wrapped child job.
func (g *generatedSearchJob) Children() []job.Describer { return []job.Describer{g.Child} }

// Attributes returns no extra tracing attributes for this job.
func (g *generatedSearchJob) Attributes(job.Verbosity) []attribute.KeyValue { return nil }

// MapChildren returns a shallow copy of the job with fn applied to the child job.
func (g *generatedSearchJob) MapChildren(fn job.MapFunc) job.Job {
	cp := *g
	cp.Child = job.Map(g.Child, fn)
	return &cp
}
// notifier stores static values that should not be exposed to runtime concerns.
// notifier exposes a method `New` for constructing notifications that require
// runtime information.
type notifier struct {
	*autoQuery // the generated query this notifier describes
}
// New builds the proposed-query notification (encoded as
// *alertobserver.ErrLuckyQueries) for this generated query given the number
// of results it returned. The count is rendered as "N+ results" at the
// streaming cap, "1 result", or "N additional results" otherwise.
//
// Fix: a local `annotations` map used to be built here and never read (the
// QueryDescription constructs its own map literal); the dead code is removed.
func (n *notifier) New(count int) error {
	var resultCountString string
	switch {
	case count == limits.DefaultMaxSearchResultsStreaming:
		resultCountString = fmt.Sprintf("%d+ results", count)
	case count == 1:
		resultCountString = "1 result"
	default:
		resultCountString = fmt.Sprintf("%d additional results", count)
	}

	return &alertobserver.ErrLuckyQueries{
		ProposedQueries: []*search.QueryDescription{{
			Description: n.description,
			Annotations: map[search.AnnotationName]string{
				search.ResultCount: resultCountString,
			},
			Query:       query.StringHuman(n.query.ToParseTree()),
			PatternType: query.SearchTypeLucky,
		}},
	}
}

View File

@ -1,131 +0,0 @@
package smartsearch
import (
"context"
"strconv"
"testing"
"github.com/hexops/autogold/v2"
"github.com/stretchr/testify/require"
"github.com/sourcegraph/sourcegraph/internal/search"
alertobserver "github.com/sourcegraph/sourcegraph/internal/search/alert"
"github.com/sourcegraph/sourcegraph/internal/search/job"
"github.com/sourcegraph/sourcegraph/internal/search/job/mockjob"
"github.com/sourcegraph/sourcegraph/internal/search/limits"
"github.com/sourcegraph/sourcegraph/internal/search/query"
"github.com/sourcegraph/sourcegraph/internal/search/result"
"github.com/sourcegraph/sourcegraph/internal/search/streaming"
)
// TestNewSmartSearchJob_Run verifies that when the initial job already returns
// results, the generated job is not run again, so its duplicate result does
// not reach the stream.
func TestNewSmartSearchJob_Run(t *testing.T) {
	// Setup: A child job that sends the same result. It serves as both the
	// initial job and the generated job below.
	mockJob := mockjob.NewMockJob()
	mockJob.RunFunc.SetDefaultHook(func(ctx context.Context, _ job.RuntimeClients, s streaming.Sender) (*search.Alert, error) {
		s.Send(streaming.SearchEvent{
			Results: []result.Match{&result.FileMatch{
				File: result.File{Path: "haut-medoc"},
			}},
		})
		return nil, nil
	})

	mockAutoQuery := &autoQuery{description: "mock", query: query.Basic{}}

	j := FeelingLuckySearchJob{
		initialJob: mockJob,
		generators: []next{func() (*autoQuery, next) { return mockAutoQuery, nil }},
		newGeneratedJob: func(*autoQuery) job.Job {
			return mockJob
		},
	}

	var sent []result.Match
	stream := streaming.StreamFunc(func(e streaming.SearchEvent) {
		sent = append(sent, e.Results...)
	})

	t.Run("deduplicate results returned by generated jobs", func(t *testing.T) {
		// The initial job sends one result; Run's non-empty-result clamp then
		// returns early, so exactly one result is streamed.
		j.Run(context.Background(), job.RuntimeClients{}, stream)
		require.Equal(t, 1, len(sent))
	})
}
// TestGeneratedSearchJob verifies the result-count strings attached to a
// generated job's proposed-query notification: none for zero results,
// "1 result" for one, and "N+ results" at the streaming cap.
func TestGeneratedSearchJob(t *testing.T) {
	mockJob := mockjob.NewMockJob()
	// setMockJobResultSize makes the child job emit exactly n results,
	// honoring context cancellation between sends.
	setMockJobResultSize := func(n int) {
		mockJob.RunFunc.SetDefaultHook(func(ctx context.Context, _ job.RuntimeClients, s streaming.Sender) (*search.Alert, error) {
			for i := range n {
				select {
				case <-ctx.Done():
					return nil, ctx.Err()
				default:
				}

				s.Send(streaming.SearchEvent{
					Results: []result.Match{&result.FileMatch{
						File: result.File{Path: strconv.Itoa(i)},
					}},
				})
			}
			return nil, nil
		})
	}

	// test runs the generated job with resultSize results and extracts the
	// ResultCount annotation from the returned ErrLuckyQueries notification
	// (empty string when no notification is produced).
	test := func(resultSize int) string {
		setMockJobResultSize(resultSize)
		q, _ := query.ParseStandard("test")
		mockQuery, _ := query.ToBasicQuery(q)
		notifier := &notifier{autoQuery: &autoQuery{description: "test", query: mockQuery}}
		j := &generatedSearchJob{
			Child:           mockJob,
			NewNotification: notifier.New,
		}
		_, err := j.Run(context.Background(), job.RuntimeClients{}, streaming.NewAggregatingStream())
		if err == nil {
			return ""
		}
		return err.(*alertobserver.ErrLuckyQueries).ProposedQueries[0].Annotations[search.ResultCount]
	}

	autogold.Expect(autogold.Raw("")).Equal(t, autogold.Raw(test(0)))
	autogold.Expect(autogold.Raw("1 result")).Equal(t, autogold.Raw(test(1)))
	autogold.Expect(autogold.Raw("10000+ results")).Equal(t, autogold.Raw(test(limits.DefaultMaxSearchResultsStreaming)))
}
// TestNewSmartSearchJob_ResultCount exercises the RESULT_THRESHOLD clamp on
// FeelingLuckySearchJob.Run using a generated job that panics if invoked.
func TestNewSmartSearchJob_ResultCount(t *testing.T) {
	// This test ensures the invariant that generated queries do not run if
	// at least RESULT_THRESHOLD results are emitted by the initial job. If
	// less than RESULT_THRESHOLD results are seen, the logic will run a
	// generated query, which always panics.
	mockJob := mockjob.NewMockJob()
	mockJob.RunFunc.SetDefaultHook(func(ctx context.Context, _ job.RuntimeClients, s streaming.Sender) (*search.Alert, error) {
		// Emit exactly RESULT_THRESHOLD results from the initial job.
		for i := range RESULT_THRESHOLD {
			s.Send(streaming.SearchEvent{
				Results: []result.Match{&result.FileMatch{
					File: result.File{Path: strconv.Itoa(i)},
				}},
			})
		}
		return nil, nil
	})

	mockAutoQuery := &autoQuery{description: "mock", query: query.Basic{}}

	j := FeelingLuckySearchJob{
		initialJob: mockJob,
		generators: []next{func() (*autoQuery, next) { return mockAutoQuery, nil }},
		newGeneratedJob: func(*autoQuery) job.Job {
			return mockjob.NewStrictMockJob() // always panic, and should never get run.
		},
	}

	var sent []result.Match
	stream := streaming.StreamFunc(func(e streaming.SearchEvent) {
		sent = append(sent, e.Results...)
	})

	t.Run("do not run generated queries over RESULT_THRESHOLD", func(t *testing.T) {
		j.Run(context.Background(), job.RuntimeClients{}, stream)
		require.Equal(t, RESULT_THRESHOLD, len(sent))
	})
}

View File

@ -1,17 +0,0 @@
[
{
"Description": "apply search type for pattern ⚬ apply language filter for pattern",
"Input": "go commit yikes derp",
"Query": "type:commit lang:Go yikes derp"
},
{
"Description": "apply search type for pattern",
"Input": "go commit yikes derp",
"Query": "type:commit go yikes derp"
},
{
"Description": "apply language filter for pattern",
"Input": "go commit yikes derp",
"Query": "lang:Go commit yikes derp"
}
]

View File

@ -1,7 +0,0 @@
[
{
"Description": "AND patterns together",
"Input": "go commit yikes derp",
"Query": "(go AND commit AND yikes AND derp)"
}
]

View File

@ -1,37 +0,0 @@
[
{
"Description": "apply search type for pattern ⚬ apply language filter for pattern",
"Input": "go commit yikes derp",
"Query": "type:commit lang:Go yikes derp"
},
{
"Description": "apply search type for pattern",
"Input": "go commit yikes derp",
"Query": "type:commit go yikes derp"
},
{
"Description": "apply language filter for pattern",
"Input": "go commit yikes derp",
"Query": "lang:Go commit yikes derp"
},
{
"Description": "apply search type for pattern ⚬ apply language filter for pattern ⚬ AND patterns together",
"Input": "go commit yikes derp",
"Query": "type:commit lang:Go (yikes AND derp)"
},
{
"Description": "apply search type for pattern ⚬ AND patterns together",
"Input": "go commit yikes derp",
"Query": "type:commit (go AND yikes AND derp)"
},
{
"Description": "apply language filter for pattern ⚬ AND patterns together",
"Input": "go commit yikes derp",
"Query": "lang:Go (commit AND yikes AND derp)"
},
{
"Description": "AND patterns together",
"Input": "go commit yikes derp",
"Query": "(go AND commit AND yikes AND derp)"
}
]

View File

@ -1,4 +0,0 @@
{
"Input": "context:global parse python",
"Query": "context:global lang:Python parse"
}

View File

@ -1,4 +0,0 @@
{
"Input": "context:global python",
"Query": "context:global lang:Python"
}

View File

@ -1,4 +0,0 @@
{
"Input": "https://github.com/sourcegraph",
"Query": "repo:^github\\.com/sourcegraph"
}

View File

@ -1,4 +0,0 @@
{
"Input": "github.com/sourcegraph",
"Query": "repo:^github\\.com/sourcegraph"
}

View File

@ -1,4 +0,0 @@
{
"Input": "https://github.com/sourcegraph/sourcegraph/blob/main/lib/README.md#L50",
"Query": "repo:^github\\.com/sourcegraph/sourcegraph$ rev:main file:^lib/README\\.md$"
}

View File

@ -1,4 +0,0 @@
{
"Input": "https://github.com/sourcegraph/sourcegraph/tree/main/lib",
"Query": "repo:^github\\.com/sourcegraph/sourcegraph$ rev:main file:^lib"
}

View File

@ -1,4 +0,0 @@
{
"Input": "https://github.com/sourcegraph/sourcegraph/tree/2.12",
"Query": "repo:^github\\.com/sourcegraph/sourcegraph$ rev:2.12"
}

View File

@ -1,4 +0,0 @@
{
"Input": "https://github.com/sourcegraph/sourcegraph/commit/abc",
"Query": "repo:^github\\.com/sourcegraph/sourcegraph$ rev:abc"
}

View File

@ -1,4 +0,0 @@
{
"Input": "https://github.com/sourcegraph/sourcegraph",
"Query": "repo:^github\\.com/sourcegraph/sourcegraph$"
}

View File

@ -1,4 +0,0 @@
{
"Input": "(ab)*",
"Query": "/(ab)*/"
}

View File

@ -1,4 +0,0 @@
{
"Input": "c++",
"Query": "DOES NOT APPLY"
}

View File

@ -1,4 +0,0 @@
{
"Input": "my.yaml.conf",
"Query": "DOES NOT APPLY"
}

View File

@ -1,4 +0,0 @@
{
"Input": "(using|struct)",
"Query": "/(using|struct)/"
}

View File

@ -1,4 +0,0 @@
{
"Input": "test.get(id)",
"Query": "DOES NOT APPLY"
}

View File

@ -1,4 +0,0 @@
{
"Input": "[a-z]+",
"Query": "/[a-z]+/"
}

View File

@ -1,4 +0,0 @@
{
"Input": "repo:http://github.com/sourcegraph/sourcegraph",
"Query": "repo:^github.com/sourcegraph/sourcegraph$"
}

View File

@ -1,4 +0,0 @@
{
"Input": "repo:https://github.com/sourcegraph/sourcegraph/blob/main/lib/README.md#L50",
"Query": "repo:^github.com/sourcegraph/sourcegraph$"
}

View File

@ -1,4 +0,0 @@
{
"Input": "repo:https://github.com/sourcegraph/sourcegraph/tree/main/lib",
"Query": "repo:^github.com/sourcegraph/sourcegraph$"
}

View File

@ -1,4 +0,0 @@
{
"Input": "repo:https://github.com/sourcegraph/sourcegraph/tree/2.12",
"Query": "repo:^github.com/sourcegraph/sourcegraph$"
}

View File

@ -1,4 +0,0 @@
{
"Input": "repo:https://github.com/sourcegraph/sourcegraph/commit/abc",
"Query": "repo:^github.com/sourcegraph/sourcegraph$"
}

View File

@ -1,4 +0,0 @@
{
"Input": "repo:https://github.com/sourcegraph/sourcegraph",
"Query": "repo:^github.com/sourcegraph/sourcegraph$"
}

View File

@ -1,4 +0,0 @@
{
"Input": "context:global parse function",
"Query": "context:global select:symbol.function type:symbol parse"
}

View File

@ -1,4 +0,0 @@
{
"Input": "context:global function",
"Query": "context:global select:symbol.function type:symbol"
}

View File

@ -1,4 +0,0 @@
{
"Input": "context:global code monitor commit",
"Query": "context:global type:commit code monitor"
}

View File

@ -1,4 +0,0 @@
{
"Input": "context:global code or monitor commit",
"Query": "context:global type:commit (code OR monitor)"
}

View File

@ -1,4 +0,0 @@
{
"Input": "context:global fix commit",
"Query": "context:global type:commit fix"
}

View File

@ -1,4 +0,0 @@
{
"Input": "context:global parse func",
"Query": "context:global (parse AND func)"
}

View File

@ -1,4 +0,0 @@
{
"Input": "repo:^github\\.com/sourcegraph/sourcegraph$ \"monitor\" \"*Monitor\"",
"Query": "repo:^github\\.com/sourcegraph/sourcegraph$ monitor *Monitor"
}

View File

@ -1,4 +0,0 @@
{
"Input": "content:\"not quoted\"",
"Query": "DOES NOT APPLY"
}

View File

@ -1,4 +0,0 @@
{
"Input": "\"monitor\"",
"Query": "monitor"
}

View File

@ -106,7 +106,7 @@ func TestOwnersOpsgenieTeam(t *testing.T) {
enc.SetIndent(" ", " ")
assert.NoError(t, enc.Encode(observabilityAlertsConfig))
// The below can be copy-pasted into site-config 'observability.alerts':
// https://sourcegraph.sourcegraph.com/search?q=context:global+repo:github.com/sourcegraph/deploy-sourcegraph-cloud+file:overlays/prod/frontend/files/site.json+%22observability.alerts%22:+%5B...%5D&patternType=structural&sm=1&groupBy=repo
// https://sourcegraph.sourcegraph.com/search?q=context:global+repo:github.com/sourcegraph/deploy-sourcegraph-cloud+file:overlays/prod/frontend/files/site.json+%22observability.alerts%22:+%5B...%5D&patternType=structural&sm=0&groupBy=repo
autogold.Expect(`[
{
"level": "critical",