go + golangci: update go to 1.19.3 and golangci to 1.50.1 (#43747)

* upgrade to go 1.19.3

* upgrade golangci-lint 1.50.1

* disable unused and unparam lints

* Fix all the lint errors

Co-authored-by: Jean-Hadrien Chabran <jh@chabran.fr>
Co-authored-by: Keegan Carruthers-Smith <keegan.csmith@gmail.com>
This commit is contained in:
William Bezuidenhout 2022-11-02 15:08:25 +02:00 committed by GitHub
parent f1fb3aa514
commit 736a5626cd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
45 changed files with 86 additions and 83 deletions

View File

@ -24,7 +24,7 @@ jobs:
- name: Install Go
uses: actions/setup-go@v2
with:
go-version: 1.18.7
go-version: 1.19.3
- name: Install asdf plugins
uses: asdf-vm/actions/install@v1

View File

@ -16,8 +16,11 @@ linters:
- staticcheck
- typecheck
- unconvert
- unused
- unparam
# Both of these are disabled because they produce a lot of errors. The issues below address these errors
# https://github.com/sourcegraph/sourcegraph/issues/43753
# https://github.com/sourcegraph/sourcegraph/issues/43755
#- unused
#- unparam
- exportloopref
linters-settings:
@ -43,9 +46,15 @@ linters-settings:
- exitAfterDefer # Only occurs in auxiliary tools
- ifElseChain # Noisy for not much gain
- singleCaseSwitch # Noisy for not much gain
unparam:
govet:
disable:
- composites
staticcheck:
checks:
- "all"
- "-SA1019" #TODO(burmudar): Mostly because of opentracing deprecation
forbidigo:
forbid:
# Use errors.Newf instead
@ -58,6 +67,9 @@ issues:
- path: _test\.go
linters:
- bodyclose
- unparam
- gosimple
- unused
run:
timeout: 5m

View File

@ -1,4 +1,4 @@
golang 1.18.7
golang 1.19.3
nodejs 16.7.0
# We're using yarn classic to install yarn 3.x as defined in package.json
yarn 1.22.19

View File

@ -328,7 +328,11 @@ export const GlobalNavbar: React.FunctionComponent<React.PropsWithChildren<Globa
className="mr-1"
to={
'/sign-in?returnTo=' +
encodeURI(history.location.pathname + history.location.search + history.location.hash)
encodeURI(
history.location.pathname +
history.location.search +
history.location.hash
)
}
variant="secondary"
outline={true}

View File

@ -234,10 +234,10 @@ func addSentry(r *mux.Router) {
logger.Warn("failed to forward", sglog.Error(err), sglog.Int("statusCode", resp.StatusCode))
return
}
resp.Body.Close()
}()
w.WriteHeader(http.StatusOK)
return
})
}

View File

@ -163,7 +163,7 @@ func TestLockoutStore(t *testing.T) {
// than the claims ExpiresAt. Additionally CI can be busy, so lets add
// a decent amount of fudge to this (10s).
want := time.Now().Add(5 * time.Minute).Truncate(jwt.TimePrecision)
got := *&claims.ExpiresAt.Time
got := claims.ExpiresAt.Time
if durationAbs(want.Sub(got)) > 10*time.Second {
t.Fatalf("unexpected ExpiresAt time:\ngot: %s\nwant: %s", got, want)
}

View File

@ -87,7 +87,7 @@ func TestDecodeSendEmail(t *testing.T) {
ts := httptest.NewServer(m)
t.Cleanup(ts.Close)
client := &(*internalapi.Client)
client := *internalapi.Client
client.URL = ts.URL
// Do not worry about error here, run assertions in the test handler

View File

@ -226,6 +226,13 @@ func (s *Server) cleanupRepos(ctx context.Context, gitServerAddrs gitserver.GitS
// Record the number and disk usage used of repos that should
// not belong on this instance and remove up to SRC_WRONG_SHARD_DELETE_LIMIT in a single Janitor run.
addr, err := s.addrForRepo(bCtx, name, gitServerAddrs)
if err != nil {
s.Logger.Error("failed to get server address for repo", log.String("repoName", string(name)))
// We bail out here because it would mean that the hostname doesn't match below and
// it would remove repos if the DB is down for example
return
}
if !s.hostnameMatch(addr) {
wrongShardRepoCount++
wrongShardRepoSize += size

View File

@ -76,7 +76,6 @@ func MakeSqliteSearchFunc(operations *sharedobservability.Operations, cachedData
}
err = errors.Newf("Processing symbols using the SQLite backend is taking a while on this %s repository. %s", humanize.Bytes(uint64(size)), help)
return
}()
dbFile, err := cachedDatabaseWriter.GetOrCreateDatabaseFile(ctx, args)

View File

@ -6,7 +6,7 @@ pushd "$(dirname "${BASH_SOURCE[0]}")/.." >/dev/null
mkdir -p .bin
version="1.45.2"
version="1.50.1"
suffix="${version}-$(go env GOOS)-$(go env GOARCH)"
target="$PWD/.bin/golangci-lint-${suffix}"

View File

@ -13,7 +13,7 @@ type GitLabCodeHost struct {
c *gitlab.Client
}
func NewGitLabCodeHost(ctx context.Context, def *CodeHostDefinition) (*GitLabCodeHost, error) {
func NewGitLabCodeHost(_ context.Context, def *CodeHostDefinition) (*GitLabCodeHost, error) {
baseURL, err := url.Parse(def.URL)
if err != nil {
return nil, err

View File

@ -79,7 +79,7 @@ func (r *Runner) Run(ctx context.Context, concurrency int) error {
var done int64
total := len(srcRepos)
g := group.NewWithResults[error]().WithMaxConcurrency(20)
g := group.NewWithResults[error]().WithMaxConcurrency(concurrency)
for _, repo := range srcRepos {
repo := repo
g.Go(func() error {

View File

@ -47,6 +47,7 @@ var (
//
// Must be kept in sync with the generator in Create.
func VisitAll(adrDir string, visit func(adr ArchitectureDecisionRecord) error) error {
// nolint:staticcheck,gosimple
return filepath.WalkDir(adrDir, func(path string, entry fs.DirEntry, err error) error {
if entry.IsDir() {
return nil

View File

@ -34,7 +34,7 @@ var checks = map[string]check.CheckFunc{
"asdf": check.CommandOutputContains("asdf", "version"),
"git": check.Combine(check.InPath("git"), checkGitVersion(">= 2.34.1")),
"yarn": check.Combine(check.InPath("yarn"), checkYarnVersion("~> 1.22.4")),
"go": check.Combine(check.InPath("go"), checkGoVersion("~> 1.18.7")),
"go": check.Combine(check.InPath("go"), checkGoVersion("~> 1.19.3")),
"node": check.Combine(check.InPath("node"), check.CommandOutputContains(`node -e "console.log(\"foobar\")"`, "foobar")),
"rust": check.Combine(check.InPath("cargo"), check.CommandOutputContains(`cargo version`, "1.58.0")),
"docker-installed": check.WrapErrMessage(check.InPath("docker"), "if Docker is installed and the check fails, you might need to start Docker.app and restart terminal and 'sg setup'"),

View File

@ -29,14 +29,14 @@ func UpdateCompose(path string, creds credentials.Credentials, pinTag string) er
std.Out.WriteNoticef("Checking %q", path)
composeFile, err := os.ReadFile(path)
if err != nil {
composeFile, innerErr := os.ReadFile(path)
if innerErr != nil {
return errors.Wrapf(err, "couldn't read %s", path)
}
checked++
newComposeFile, err := updateComposeFile(composeFile, creds, pinTag)
if err != nil {
newComposeFile, innerErr := updateComposeFile(composeFile, creds, pinTag)
if innerErr != nil {
return err
}
if newComposeFile == nil {

View File

@ -119,5 +119,5 @@ func decodeIDIntoUniqueViewID(id string) (string, error) {
if !strings.Contains(sDecoded, "insight_view") {
return "", errors.Newf("decoded id is not an insight_view id: %s", sDecoded)
}
return strings.Trim(strings.TrimLeft(sDecoded, "insight_view:"), "\""), nil
return strings.Trim(strings.TrimPrefix(sDecoded, "insight_view:"), "\""), nil
}

View File

@ -136,7 +136,7 @@ func constructStartCmdLongHelp() string {
case "batches":
names = append(names, fmt.Sprintf("%s 🦡", name))
default:
names = append(names, fmt.Sprintf("%s", name))
names = append(names, name)
}
}
sort.Strings(names)

View File

@ -126,7 +126,6 @@ func newOAuthFlowHandler(db database.DB, serviceType string) http.Handler {
return
}
http.Redirect(w, req, "/install-github-app-success", http.StatusFound)
return
}))
mux.Handle("/get-github-app-installation", http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
logger := log.Scoped("get-github-app-installation", "handler for getting github app installations")

View File

@ -43,7 +43,6 @@ func gitserverProxy(gitserverClient GitserverClient, gitPath string) http.Handle
Transport: httpcli.InternalClient.Transport,
}
p.ServeHTTP(w, r)
return
})
}

View File

@ -161,7 +161,7 @@ func ParseDiff(files []string) (diff Diff) {
if err == nil {
b := make([]byte, 19) // "#!/usr/bin/env bash" = 19 chars
_, _ = f.Read(b)
if bytes.Compare(b[0:2], []byte("#!")) == 0 && bytes.Contains(b, []byte("bash")) {
if bytes.Equal(b[0:2], []byte("#!")) && bytes.Contains(b, []byte("bash")) {
// If the file starts with a shebang and has "bash" somewhere after, it's most probably
// some shell script.
diff |= Shell

View File

@ -67,7 +67,7 @@ func (p Provider) FetchAccount(ctx context.Context, user *types.User, current []
}
func (p Provider) checkAccountsAgainstVerifiedEmails(accts gerrit.ListAccountsResponse, user *types.User, verifiedEmails []string) (*extsvc.Account, bool, error) {
if accts == nil || len(accts) == 0 {
if len(accts) == 0 {
return nil, false, nil
}
for _, email := range verifiedEmails {

View File

@ -248,7 +248,7 @@ func (r *searchAggregationResults) Send(event streaming.SearchEvent) {
r.tabulator(nil, err)
continue
}
current, _ := combined[groupKey]
current := combined[groupKey]
combined[groupKey] = current + count
}
}

View File

@ -437,8 +437,6 @@ func (h *historicalEnqueuer) convertJustInTimeInsights(ctx context.Context) {
h.logger.Warn("unable to purge jobs for old seriesID", sglog.String("seriesId", oldSeriesId), sglog.Error(err))
}
}
return
}
func markInsightsComplete(ctx context.Context, completed []itypes.InsightSeries, dataSeriesStore store.DataSeriesStore) {

View File

@ -268,7 +268,7 @@ func filterRecordingsBySeriesRepos(ctx context.Context, repoStore discovery.Repo
if record.RepoID == nil {
continue
}
if included := repos[*record.RepoID]; included == true {
if included := repos[*record.RepoID]; included {
filteredRecords = append(filteredRecords, record)
}
}

View File

@ -78,11 +78,7 @@ func (c *ComputeExecutor) Execute(ctx context.Context, query, groupBy string, re
})
for _, group := range grouped {
if _, ok := groupedValues[group.Value]; ok {
groupedValues[group.Value] += group.Count
} else {
groupedValues[group.Value] = group.Count
}
groupedValues[group.Value] += group.Count
}
}

View File

@ -36,7 +36,9 @@ func Search(ctx context.Context, query string, patternType *string, decoder stre
return err
}
if patternType != nil {
req.URL.Query().Add("t", *patternType)
query := req.URL.Query()
query.Add("t", *patternType)
req.URL.RawQuery = query.Encode()
}
req = req.WithContext(ctx)

View File

@ -511,15 +511,11 @@ func (r *Resolver) CreateLineChartSearchInsight(ctx context.Context, args *graph
seriesFillStrategy := makeFillSeriesStrategy(ctx, insightTx, backfiller, r.scheduler, r.insightEnqueuer)
var scoped []types.InsightSeries
for _, series := range args.Input.DataSeries {
c, err := createAndAttachSeries(ctx, insightTx, seriesFillStrategy, view, series)
_, err := createAndAttachSeries(ctx, insightTx, seriesFillStrategy, view, series)
if err != nil {
return nil, errors.Wrap(err, "createAndAttachSeries")
}
if len(c.Repositories) > 0 {
scoped = append(scoped, *c)
}
}
if len(dashboardIds) > 0 {
@ -1300,7 +1296,7 @@ func sortSeriesResolvers(ctx context.Context, seriesOptions types.SeriesDisplayO
// First sort lexicographically (ascending) to make sure the ordering is consistent even if some result counts are equal.
sort.SliceStable(resolvers, func(i, j int) bool {
hasSemVar, result := ascLexSort(resolvers[i].Label(), resolvers[j].Label())
if hasSemVar == true {
if hasSemVar {
return result
}
return strings.Compare(resolvers[i].Label(), resolvers[j].Label()) < 0
@ -1324,7 +1320,7 @@ func sortSeriesResolvers(ctx context.Context, seriesOptions types.SeriesDisplayO
} else {
sort.SliceStable(resolvers, func(i, j int) bool {
hasSemVar, result := ascLexSort(resolvers[i].Label(), resolvers[j].Label())
if hasSemVar == true {
if hasSemVar {
return !result
}
return strings.Compare(resolvers[i].Label(), resolvers[j].Label()) > 0

View File

@ -101,7 +101,7 @@ func (h *inProgressHandler) Handle(ctx context.Context, logger log.Logger, recor
Value: series.SampleIntervalValue,
}, series.CreatedAt.Truncate(time.Hour*24))
for true {
for {
repoId, more, finish := itr.NextWithFinish()
if !more {
break

View File

@ -184,7 +184,7 @@ func (s *Store) SeriesPoints(ctx context.Context, opts SeriesPointsOpts) ([]Seri
func (s *Store) LoadSeriesInMem(ctx context.Context, opts SeriesPointsOpts) (points []SeriesPoint, err error) {
denylist, err := s.permStore.GetUnauthorizedRepoIDs(ctx)
if err != nil {
return []SeriesPoint{}, err
return nil, err
}
denyBitmap := roaring.New()
for _, id := range denylist {
@ -255,6 +255,10 @@ func (s *Store) LoadSeriesInMem(ctx context.Context, opts SeriesPointsOpts) (poi
return nil
})
if err != nil {
return nil, err
}
pointsMap := make(map[string]*SeriesPoint)
captureValues := make(map[string]struct{})

View File

@ -47,7 +47,6 @@ func (s *Service) Search(ctx context.Context, args search.SymbolsParameters) (_
err = errors.Newf("Still processing symbols ([more details](https://docs.sourcegraph.com/code_navigation/explanations/rockskip)). Estimated completion: %s.", status.Remaining())
}
}
return
}()
}

View File

@ -21,7 +21,7 @@ func (s *CodeIntelByLanguage) Count() float64 { return s.Count_ }
func GetCodeIntelByLanguage(ctx context.Context, db database.DB, cache bool, dateRange string) ([]*CodeIntelByLanguage, error) {
cacheKey := fmt.Sprintf(`CodeIntelByLanguage:%s`, dateRange)
if cache == true {
if cache {
if nodes, err := getArrayFromCache[CodeIntelByLanguage](cacheKey); err == nil {
return nodes, nil
}

View File

@ -28,7 +28,7 @@ func (s *CodeIntelTopRepositories) HasPrecise() bool { return s.HasPrecise_ }
func GetCodeIntelTopRepositories(ctx context.Context, db database.DB, cache bool, dateRange string) ([]*CodeIntelTopRepositories, error) {
cacheKey := fmt.Sprintf(`CodeIntelTopRepositories:%s`, dateRange)
if cache == true {
if cache {
if nodes, err := getArrayFromCache[CodeIntelTopRepositories](cacheKey); err == nil {
return nodes, nil
}

View File

@ -42,7 +42,7 @@ func (n *AnalyticsNode) RegisteredUsers() float64 { return n.Data.RegisteredUser
func (f *AnalyticsFetcher) Nodes(ctx context.Context) ([]*AnalyticsNode, error) {
cacheKey := fmt.Sprintf(`%s:%s:%s:%s`, f.group, f.dateRange, f.grouping, "nodes")
if f.cache == true {
if f.cache {
if nodes, err := getArrayFromCache[AnalyticsNode](cacheKey); err == nil {
return nodes, nil
}
@ -137,7 +137,7 @@ func (s *AnalyticsSummary) TotalRegisteredUsers() float64 { return s.Data.TotalR
func (f *AnalyticsFetcher) Summary(ctx context.Context) (*AnalyticsSummary, error) {
cacheKey := fmt.Sprintf(`%s:%s:%s:%s`, f.group, f.dateRange, f.grouping, "summary")
if f.cache == true {
if f.cache {
if summary, err := getItemFromCache[AnalyticsSummary](cacheKey); err == nil {
return summary, nil
}

View File

@ -15,7 +15,7 @@ type Repos struct {
func (r *Repos) Summary(ctx context.Context) (*ReposSummary, error) {
cacheKey := "Repos:Summary"
if r.Cache == true {
if r.Cache {
if summary, err := getItemFromCache[ReposSummary](cacheKey); err == nil {
return summary, nil
}

View File

@ -38,11 +38,11 @@ func (s *Users) Activity() (*AnalyticsFetcher, error) {
var (
frequencyQuery = `
WITH user_days_used AS (
SELECT
SELECT
CASE WHEN user_id = 0 THEN anonymous_user_id ELSE CAST(user_id AS TEXT) END AS user_id,
COUNT(DISTINCT DATE(timestamp)) AS days_used
COUNT(DISTINCT DATE(timestamp)) AS days_used
FROM event_logs
WHERE
WHERE
DATE(timestamp) %s
AND %s
GROUP BY 1
@ -53,8 +53,8 @@ var (
GROUP BY 1
),
days_used_total_frequency AS (
SELECT
days_used_frequency.days_used,
SELECT
days_used_frequency.days_used,
SUM(more_days_used_frequency.frequency) AS frequency
FROM days_used_frequency
LEFT JOIN days_used_frequency AS more_days_used_frequency
@ -65,9 +65,9 @@ var (
SELECT MAX(frequency) AS max_frequency
FROM days_used_total_frequency
)
SELECT
days_used,
frequency,
SELECT
days_used,
frequency,
frequency * 100.00 / COALESCE(max_frequency, 1) AS percentage
FROM days_used_total_frequency, max_days_used_total_frequency
ORDER BY 1 ASC;
@ -76,7 +76,7 @@ var (
func (f *Users) Frequencies(ctx context.Context) ([]*UsersFrequencyNode, error) {
cacheKey := fmt.Sprintf("Users:%s:%s", "Frequencies", f.DateRange)
if f.Cache == true {
if f.Cache {
if nodes, err := getArrayFromCache[UsersFrequencyNode](cacheKey); err == nil {
return nodes, nil
}
@ -138,11 +138,11 @@ func (n *UsersFrequencyNode) Percentage() float64 { return n.Data.Percentage }
var (
mauQuery = `
SELECT
SELECT
TO_CHAR(timestamp, 'YYYY-MM') AS date,
COUNT(DISTINCT CASE WHEN user_id = 0 THEN anonymous_user_id ELSE CAST(user_id AS TEXT) END) AS count
FROM event_logs
WHERE
WHERE
timestamp BETWEEN %s AND %s
AND %s
GROUP BY 1

View File

@ -110,9 +110,7 @@ func scanIndexWithCount(s dbutil.Scanner) (index types.Index, count int, err err
return index, 0, err
}
for _, entry := range executionLogs {
index.ExecutionLogs = append(index.ExecutionLogs, entry)
}
index.ExecutionLogs = append(index.ExecutionLogs, executionLogs...)
return index, count, nil
}

View File

@ -204,8 +204,8 @@ func StartAndWaitForCompletion(cmd *exec.Cmd) error {
if err := cmd.Wait(); err != nil {
if len(b.Bytes()) > 0 {
log15.Error("failed to execute comby command", "error", string(b.Bytes()))
msg := fmt.Sprintf("failed to wait for executing comby command: comby error: %s", b.Bytes())
log15.Error("failed to execute comby command", "error", b.String())
msg := fmt.Sprintf("failed to wait for executing comby command: comby error: %s", b.String())
return errors.Wrap(err, msg)
}
var stderr string

View File

@ -659,11 +659,6 @@ func TestExternalAccounts_UpdateGitHubAppInstallations(t *testing.T) {
require.NoError(t, err)
require.Equal(t, 3, len(accts))
acctIds := []int32{}
for _, acct := range accts {
acctIds = append(acctIds, acct.ID)
}
err = db.UserExternalAccounts().UpdateGitHubAppInstallations(ctx, acct, installations)
require.NoError(t, err)

View File

@ -3,7 +3,6 @@ package database
import (
"context"
"encoding/json"
"fmt"
"strings"
"time"
@ -61,7 +60,7 @@ func (e *SecurityEvent) marshalArgumentAsJSON() string {
if e.Argument == nil {
return "{}"
}
return fmt.Sprintf("%s", e.Argument)
return string(e.Argument)
}
// SecurityEventLogsStore provides persistence for security events.

View File

@ -98,11 +98,7 @@ func isAllowedDiffArg(arg string) bool {
// make sure that arg is not a local file
_, err := os.Stat(arg)
if os.IsNotExist(err) {
return true
}
return false
return os.IsNotExist(err)
}
// isAllowedGitArg checks if the arg is allowed.

View File

@ -11,7 +11,7 @@ import (
type sliceWrapper []any
func (s sliceWrapper) MarshalJSON() ([]byte, error) {
if s == nil || len(s) == 0 {
if len(s) == 0 {
return nil, nil
}

View File

@ -47,7 +47,7 @@ func TestGitHubWebhookHandle(t *testing.T) {
t.Fatal(err)
}
conn := *&schema.GitHubConnection{
conn := schema.GitHubConnection{
Url: "https://github.com",
Token: "token",
Repos: []string{"owner/name"},

View File

@ -212,7 +212,7 @@ type alertKind string
const (
smartSearchAdditionalResults alertKind = "smart-search-additional-results"
smartSearchPureResults = "smart-search-pure-results"
smartSearchPureResults alertKind = "smart-search-pure-results"
)
func (o *Observer) errorToAlert(ctx context.Context, err error) (*search.Alert, error) {
@ -256,8 +256,7 @@ func (o *Observer) errorToAlert(ctx context.Context, err error) (*search.Alert,
}
if errors.As(err, &mErr) {
var a *search.Alert
a = AlertForMissingRepoRevs(mErr.Missing)
a := AlertForMissingRepoRevs(mErr.Missing)
a.Priority = 6
return a, nil
}

View File

@ -205,7 +205,7 @@ func (s *HorizontalSearcher) streamSearchExperimentalRanking(ctx context.Context
endpoints := make([]string, 0, len(clients))
for endpoint := range clients {
endpoints = append(endpoints, endpoint)
endpoints = append(endpoints, endpoint) //nolint:staticcheck
}
siteConfig := newRankingSiteConfig(conf.Get().SiteConfiguration)

View File

@ -220,7 +220,7 @@ func (n *notifier) New(count int) error {
if count == limits.DefaultMaxSearchResultsStreaming {
resultCountString = fmt.Sprintf("%d+ results", count)
} else if count == 1 {
resultCountString = fmt.Sprintf("1 result")
resultCountString = "1 result"
} else {
resultCountString = fmt.Sprintf("%d additional results", count)
}