Add cursor-based pagination to repositories page (#46624)

* [Repositories] add cursor-based pagination

Co-authored-by: Indradhanush Gupta <indradhanush.gupta@gmail.com>

Authored by Naman Kumar on 2023-01-20 15:05:32 +05:30, committed by GitHub
parent 45afb64ae1
commit 4f2f4b2073
22 changed files with 956 additions and 381 deletions

View File

@ -628,17 +628,19 @@ export class FilteredConnection<
this.showMoreClicks.next()
}
private buildArgs = (filterValues: Map<string, FilteredConnectionFilterValue>): FilteredConnectionArgs => {
let args: FilteredConnectionArgs = {}
for (const key of filterValues.keys()) {
const value = filterValues.get(key)
if (value === undefined) {
continue
}
args = { ...args, ...value.args }
private buildArgs = buildFilterArgs
}
export const buildFilterArgs = (filterValues: Map<string, FilteredConnectionFilterValue>): FilteredConnectionArgs => {
let args: FilteredConnectionArgs = {}
for (const key of filterValues.keys()) {
const value = filterValues.get(key)
if (value === undefined) {
continue
}
return args
args = { ...args, ...value.args }
}
return args
}
/**

View File

@ -41,7 +41,7 @@ export interface UsePaginatedConnectionResult<TResult, TVariables, TNode> extend
connection?: PaginatedConnection<TNode>
loading: boolean
error?: ApolloError
refetch: () => any
refetch: (variables?: TVariables) => any
}
interface UsePaginatedConnectionConfig<TResult> {
@ -53,6 +53,8 @@ interface UsePaginatedConnectionConfig<TResult> {
fetchPolicy?: WatchQueryFetchPolicy
// Allows running an optional callback on any successful request
onCompleted?: (data: TResult) => void
// Allows providing a polling interval to useQuery
pollInterval?: number
}
interface UsePaginatedConnectionParameters<TResult, TVariables extends PaginatedConnectionQueryArguments, TNode> {
@ -98,6 +100,7 @@ export const usePageSwitcherPagination = <TResult, TVariables extends PaginatedC
variables: queryVariables,
fetchPolicy: options?.fetchPolicy,
onCompleted: options?.onCompleted,
pollInterval: options?.pollInterval,
})
const connection = useMemo(() => {

View File

@ -65,7 +65,7 @@ export const hasNextPage = (connection: Connection<unknown>): boolean =>
: typeof connection.totalCount === 'number' && connection.nodes.length < connection.totalCount
export interface GetUrlQueryParameters {
first: {
first?: {
actual: number
default: number
}
@ -93,7 +93,7 @@ export const getUrlQuery = ({
searchParameters.set(QUERY_KEY, query)
}
if (first.actual !== first.default) {
if (!!first && first.actual !== first.default) {
searchParameters.set('first', String(first.actual))
}
@ -111,7 +111,7 @@ export const getUrlQuery = ({
}
}
if (visibleResultCount && visibleResultCount !== 0 && visibleResultCount !== first.actual) {
if (visibleResultCount && visibleResultCount !== 0 && visibleResultCount !== first?.actual) {
searchParameters.set('visible', String(visibleResultCount))
}

View File

@ -1,8 +1,8 @@
import React, { useCallback, useEffect, useMemo } from 'react'
import React, { useState, useEffect, useMemo } from 'react'
import { mdiCloudDownload, mdiCog, mdiBrain } from '@mdi/js'
import { isEqual } from 'lodash'
import { RouteComponentProps } from 'react-router'
import { Observable } from 'rxjs'
import { logger } from '@sourcegraph/common'
import { useQuery } from '@sourcegraph/http-client'
@ -15,6 +15,7 @@ import {
Container,
H4,
Icon,
Input,
Link,
LoadingSpinner,
PageHeader,
@ -22,17 +23,22 @@ import {
Tooltip,
ErrorAlert,
LinkOrSpan,
PageSwitcher,
} from '@sourcegraph/wildcard'
import { EXTERNAL_SERVICE_IDS_AND_NAMES } from '../components/externalServices/backend'
import {
FilteredConnection,
buildFilterArgs,
FilterControl,
FilteredConnectionFilterValue,
FilteredConnectionFilter,
FilteredConnectionQueryArguments,
} from '../components/FilteredConnection'
import { usePageSwitcherPagination } from '../components/FilteredConnection/hooks/usePageSwitcherPagination'
import { getFilterFromURL, getUrlQuery } from '../components/FilteredConnection/utils'
import { PageTitle } from '../components/PageTitle'
import {
RepositoriesResult,
RepositoriesVariables,
RepositoryOrderBy,
RepositoryStatsResult,
ExternalServiceIDsAndNamesVariables,
@ -43,7 +49,7 @@ import {
import { refreshSiteFlags } from '../site/backend'
import { ValueLegendList, ValueLegendListProps } from './analytics/components/ValueLegendList'
import { fetchAllRepositoriesAndPollIfEmptyOrAnyCloning, REPOSITORY_STATS, REPO_PAGE_POLL_INTERVAL } from './backend'
import { REPOSITORY_STATS, REPO_PAGE_POLL_INTERVAL, REPOSITORIES_QUERY } from './backend'
import { ExternalRepositoryIcon } from './components/ExternalRepositoryIcon'
import { RepoMirrorInfo } from './components/RepoMirrorInfo'
@ -355,17 +361,79 @@ export const SiteAdminRepositoriesPage: React.FunctionComponent<React.PropsWithC
return filtersWithExternalServices
}, [extSvcs])
const queryRepositories = useCallback(
(args: FilteredConnectionQueryArguments): Observable<RepositoriesResult['repositories']> =>
fetchAllRepositoriesAndPollIfEmptyOrAnyCloning(args),
[]
const [filterValues, setFilterValues] = useState<Map<string, FilteredConnectionFilterValue>>(() =>
getFilterFromURL(new URLSearchParams(location.search), filters)
)
useEffect(() => {
setFilterValues(getFilterFromURL(new URLSearchParams(location.search), filters))
}, [filters, location.search])
const [searchQuery, setSearchQuery] = useState<string>(
() => new URLSearchParams(location.search).get('query') || ''
)
useEffect(() => {
const searchFragment = getUrlQuery({
query: searchQuery,
filters,
filterValues,
search: location.search,
})
const searchFragmentParams = new URLSearchParams(searchFragment)
searchFragmentParams.sort()
const oldParams = new URLSearchParams(location.search)
oldParams.sort()
if (!isEqual(Array.from(searchFragmentParams), Array.from(oldParams))) {
history.replace({
search: searchFragment,
hash: location.hash,
// Do not throw away flash messages
state: location.state,
})
}
}, [filters, filterValues, searchQuery, location, history])
const variables = useMemo<RepositoriesVariables>(() => {
const args = buildFilterArgs(filterValues)
return {
...args,
query: searchQuery,
indexed: args.indexed ?? true,
notIndexed: args.notIndexed ?? true,
failedFetch: args.failedFetch ?? false,
corrupted: args.corrupted ?? false,
cloneStatus: args.cloneStatus ?? null,
externalService: args.externalService ?? null,
} as RepositoriesVariables
}, [searchQuery, filterValues])
const {
connection,
loading: reposLoading,
error: reposError,
refetch,
...paginationProps
} = usePageSwitcherPagination<RepositoriesResult, RepositoriesVariables, SiteAdminRepositoryFields>({
query: REPOSITORIES_QUERY,
variables,
getConnection: ({ data }) => data?.repositories || undefined,
options: { pollInterval: 5000 },
})
useEffect(() => {
refetch(variables)
}, [refetch, variables])
const showRepositoriesAddedBanner = new URLSearchParams(location.search).has('repositoriesUpdated')
const licenseInfo = window.context.licenseInfo
const error = repoStatsError || extSvcError
const loading = repoStatsLoading || extSvcLoading
const error = repoStatsError || extSvcError || reposError
const loading = repoStatsLoading || extSvcLoading || reposLoading
return (
<div className="site-admin-repositories-page">
@ -413,20 +481,49 @@ export const SiteAdminRepositoriesPage: React.FunctionComponent<React.PropsWithC
{loading && !error && <LoadingSpinner />}
{legends && <ValueLegendList className="mb-3" items={legends} />}
{extSvcs && (
<FilteredConnection<SiteAdminRepositoryFields, Omit<RepositoryNodeProps, 'node'>>
className="mb-0"
listClassName="list-group list-group-flush mb-0"
summaryClassName="mt-2"
withCenteredSummary={true}
noun="repository"
pluralNoun="repositories"
queryConnection={queryRepositories}
nodeComponent={RepositoryNode}
inputClassName="ml-2 flex-1"
filters={filters}
history={history}
location={location}
/>
<>
<div className="d-flex justify-content-center">
<FilterControl
filters={filters}
values={filterValues}
onValueSelect={(
filter: FilteredConnectionFilter,
value: FilteredConnectionFilterValue
) =>
setFilterValues(values => {
const newValues = new Map(values)
newValues.set(filter.id, value)
return newValues
})
}
/>
<Input
type="search"
className="flex-1"
placeholder="Search repositories..."
name="query"
value={searchQuery}
onChange={event => setSearchQuery(event.currentTarget.value)}
autoComplete="off"
autoCorrect="off"
autoCapitalize="off"
spellCheck={false}
aria-label="Search repositories..."
variant="regular"
/>
</div>
<ul className="list-group list-group-flush mt-4">
{(connection?.nodes || []).map(node => (
<RepositoryNode key={node.id} node={node} />
))}
</ul>
<PageSwitcher
{...paginationProps}
className="mt-4"
totalCount={connection?.totalCount ?? null}
totalLabel="repositories"
/>
</>
)}
</Container>
</div>

View File

@ -20,7 +20,7 @@ export const OVERVIEW_STATISTICS = gql`
totalCount
}
repositories {
totalCount(precise: true)
totalCount
}
repositoryStats {
gitDirBytes

View File

@ -3,7 +3,7 @@ import { parse as parseJSONC } from 'jsonc-parser'
import { Observable } from 'rxjs'
import { map, mapTo, tap } from 'rxjs/operators'
import { repeatUntil, resetAllMemoizationCaches } from '@sourcegraph/common'
import { resetAllMemoizationCaches } from '@sourcegraph/common'
import { createInvalidGraphQLMutationResponseError, dataOrThrowErrors, gql, useQuery } from '@sourcegraph/http-client'
import { Settings } from '@sourcegraph/shared/src/settings/settings'
@ -31,9 +31,6 @@ import {
RandomizeUserPasswordResult,
ReloadSiteResult,
ReloadSiteVariables,
RepositoriesResult,
RepositoriesVariables,
RepositoryOrderBy,
Scalars,
ScheduleRepositoryPermissionsSyncResult,
ScheduleRepositoryPermissionsSyncVariables,
@ -136,6 +133,7 @@ const siteAdminRepositoryFieldsFragment = gql`
${externalRepositoryFieldsFragment}
fragment SiteAdminRepositoryFields on Repository {
__typename
id
name
createdAt
@ -150,86 +148,55 @@ const siteAdminRepositoryFieldsFragment = gql`
}
}
`
/**
* Fetches all repositories.
*
* @returns Observable that emits the list of repositories
*/
function fetchAllRepositories(args: Partial<RepositoriesVariables>): Observable<RepositoriesResult['repositories']> {
return requestGraphQL<RepositoriesResult, RepositoriesVariables>(
gql`
query Repositories(
$first: Int
$query: String
$indexed: Boolean
$notIndexed: Boolean
$failedFetch: Boolean
$corrupted: Boolean
$cloneStatus: CloneStatus
$orderBy: RepositoryOrderBy
$descending: Boolean
$externalService: ID
) {
repositories(
first: $first
query: $query
indexed: $indexed
notIndexed: $notIndexed
failedFetch: $failedFetch
corrupted: $corrupted
cloneStatus: $cloneStatus
orderBy: $orderBy
descending: $descending
externalService: $externalService
) {
nodes {
...SiteAdminRepositoryFields
}
totalCount(precise: true)
pageInfo {
hasNextPage
}
}
export const REPOSITORIES_QUERY = gql`
query Repositories(
$first: Int
$last: Int
$after: String
$before: String
$query: String
$indexed: Boolean
$notIndexed: Boolean
$failedFetch: Boolean
$corrupted: Boolean
$cloneStatus: CloneStatus
$orderBy: RepositoryOrderBy
$descending: Boolean
$externalService: ID
) {
repositories(
first: $first
last: $last
after: $after
before: $before
query: $query
indexed: $indexed
notIndexed: $notIndexed
failedFetch: $failedFetch
corrupted: $corrupted
cloneStatus: $cloneStatus
orderBy: $orderBy
descending: $descending
externalService: $externalService
) {
nodes {
...SiteAdminRepositoryFields
}
totalCount
pageInfo {
hasNextPage
hasPreviousPage
startCursor
endCursor
}
${siteAdminRepositoryFieldsFragment}
`,
{
indexed: args.indexed ?? true,
notIndexed: args.notIndexed ?? true,
failedFetch: args.failedFetch ?? false,
corrupted: args.corrupted ?? false,
first: args.first ?? null,
query: args.query ?? null,
cloneStatus: args.cloneStatus ?? null,
orderBy: args.orderBy ?? RepositoryOrderBy.REPOSITORY_NAME,
descending: args.descending ?? false,
externalService: args.externalService ?? null,
}
).pipe(
map(dataOrThrowErrors),
map(data => data.repositories)
)
}
}
${siteAdminRepositoryFieldsFragment}
`
export const REPO_PAGE_POLL_INTERVAL = 5000
export function fetchAllRepositoriesAndPollIfEmptyOrAnyCloning(
args: Partial<RepositoriesVariables>
): Observable<RepositoriesResult['repositories']> {
return fetchAllRepositories(args).pipe(
// Poll every 5000ms if repositories are being cloned or the list is empty.
repeatUntil(
result =>
result.nodes &&
result.nodes.length > 0 &&
result.nodes.every(nodes => !nodes.mirrorInfo.cloneInProgress && nodes.mirrorInfo.cloned),
{ delay: REPO_PAGE_POLL_INTERVAL }
)
)
}
export const SLOW_REQUESTS = gql`
query SlowRequests($after: String) {
slowRequests(after: $after) {

View File

@ -24,9 +24,9 @@ type ConnectionResolverStore[N any] interface {
// ComputeNodes returns the list of nodes based on the pagination args.
ComputeNodes(context.Context, *database.PaginationArgs) ([]*N, error)
// MarshalCursor returns cursor for a node and is called for generating start and end cursors.
MarshalCursor(*N) (*string, error)
MarshalCursor(*N, database.OrderBy) (*string, error)
// UnmarshalCursor returns node id from after/before cursor string.
UnmarshalCursor(string) (*int, error)
UnmarshalCursor(string, database.OrderBy) (*string, error)
}
type ConnectionResolverArgs struct {
@ -60,6 +60,10 @@ type ConnectionResolverOptions struct {
//
// Defaults to `true` when not set.
Reverse *bool
// Columns to order by
OrderBy database.OrderBy
// Order direction
Ascending bool
}
// MaxPageSize returns the configured max page limit for the connection
@ -104,7 +108,10 @@ func (r *ConnectionResolver[N]) paginationArgs() (*database.PaginationArgs, erro
return nil, nil
}
paginationArgs := database.PaginationArgs{}
paginationArgs := database.PaginationArgs{
OrderBy: r.options.OrderBy,
Ascending: r.options.Ascending,
}
limit := r.pageSize() + 1
if r.args.First != nil {
@ -116,7 +123,7 @@ func (r *ConnectionResolver[N]) paginationArgs() (*database.PaginationArgs, erro
}
if r.args.After != nil {
after, err := r.store.UnmarshalCursor(*r.args.After)
after, err := r.store.UnmarshalCursor(*r.args.After, r.options.OrderBy)
if err != nil {
return nil, err
}
@ -125,7 +132,7 @@ func (r *ConnectionResolver[N]) paginationArgs() (*database.PaginationArgs, erro
}
if r.args.Before != nil {
before, err := r.store.UnmarshalCursor(*r.args.Before)
before, err := r.store.UnmarshalCursor(*r.args.Before, r.options.OrderBy)
if err != nil {
return nil, err
}
@ -146,11 +153,11 @@ func (r *ConnectionResolver[N]) TotalCount(ctx context.Context) (int32, error) {
r.data.total, r.data.totalError = r.store.ComputeTotal(ctx)
})
if r.data.totalError != nil || r.data.total == nil {
return 0, r.data.totalError
if r.data.total != nil {
return *r.data.total, r.data.totalError
}
return *r.data.total, r.data.totalError
return 0, r.data.totalError
}
// Nodes returns value for connection.Nodes and is called by the graphql api.
@ -208,6 +215,7 @@ func (r *ConnectionResolver[N]) PageInfo(ctx context.Context) (*ConnectionPageIn
nodes: nodes,
store: r.store,
args: r.args,
orderBy: r.options.OrderBy,
}, nil
}
@ -217,6 +225,7 @@ type ConnectionPageInfo[N any] struct {
nodes []*N
store ConnectionResolverStore[N]
args *ConnectionResolverArgs
orderBy database.OrderBy
}
// HasNextPage returns value for connection.pageInfo.hasNextPage and is called by the graphql api.
@ -251,7 +260,7 @@ func (p *ConnectionPageInfo[N]) EndCursor() (cursor *string, err error) {
return nil, nil
}
cursor, err = p.store.MarshalCursor(p.nodes[len(p.nodes)-1])
cursor, err = p.store.MarshalCursor(p.nodes[len(p.nodes)-1], p.orderBy)
return
}
@ -262,7 +271,7 @@ func (p *ConnectionPageInfo[N]) StartCursor() (cursor *string, err error) {
return nil, nil
}
cursor, err = p.store.MarshalCursor(p.nodes[0])
cursor, err = p.store.MarshalCursor(p.nodes[0], p.orderBy)
return
}
@ -270,7 +279,11 @@ func (p *ConnectionPageInfo[N]) StartCursor() (cursor *string, err error) {
// NewConnectionResolver returns a new connection resolver built using the store and connection args.
func NewConnectionResolver[N any](store ConnectionResolverStore[N], args *ConnectionResolverArgs, options *ConnectionResolverOptions) (*ConnectionResolver[N], error) {
if options == nil {
options = &ConnectionResolverOptions{}
options = &ConnectionResolverOptions{OrderBy: database.OrderBy{{Field: "id"}}}
}
if len(options.OrderBy) == 0 {
options.OrderBy = database.OrderBy{{Field: "id"}}
}
return &ConnectionResolver[N]{
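The hunk above changes the `ConnectionResolverStore` contract: cursors are now opaque strings rather than integer IDs, and both `MarshalCursor` and `UnmarshalCursor` receive the `database.OrderBy` so a store can build cursors that match whatever ordering the connection was resolved with. Below is a minimal sketch of a store written against the new signatures. The `widget` node type, its fields, the wiring helper, and the `graphqlutil` import path are illustrative assumptions; only the method signatures and the `ConnectionResolverOptions` fields come from this diff.

package graphqlbackend

import (
	"context"
	"fmt"
	"strings"

	"github.com/sourcegraph/sourcegraph/cmd/frontend/graphqlbackend/graphqlutil" // assumed import path
	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/lib/errors"
)

// widget is a stand-in node type for this sketch.
type widget struct {
	ID   int32
	Name string
}

type widgetConnectionStore struct{}

func (s *widgetConnectionStore) ComputeTotal(ctx context.Context) (*int32, error) {
	total := int32(0) // a real store would run a COUNT query here
	return &total, nil
}

func (s *widgetConnectionStore) ComputeNodes(ctx context.Context, args *database.PaginationArgs) ([]*widget, error) {
	// args now arrives with OrderBy/Ascending pre-populated from the
	// ConnectionResolverOptions below; a real store would translate it
	// into its list query. Stubbed out here.
	return []*widget{}, nil
}

func (s *widgetConnectionStore) MarshalCursor(node *widget, orderBy database.OrderBy) (*string, error) {
	// Encode the ordering column's value plus the primary key so the
	// cursor stays unambiguous even when the column is not unique.
	cursor := fmt.Sprintf("%s@%d", node.Name, node.ID)
	return &cursor, nil
}

func (s *widgetConnectionStore) UnmarshalCursor(cursor string, orderBy database.OrderBy) (*string, error) {
	// Return the comma-separated values the database layer compares
	// against, one per OrderBy column.
	parts := strings.Split(cursor, "@")
	if len(parts) != 2 {
		return nil, errors.New("malformed widget cursor")
	}
	csv := fmt.Sprintf("'%s', %s", parts[0], parts[1])
	return &csv, nil
}

// newWidgetConnection wires the store up the same way the repositories
// resolver later in this diff does: the order-by columns (with an "id"
// tie-breaker) and the direction are passed once via
// ConnectionResolverOptions and flow into both the pagination SQL and
// the cursors.
func newWidgetConnection(args *graphqlutil.ConnectionResolverArgs) (*graphqlutil.ConnectionResolver[widget], error) {
	opts := &graphqlutil.ConnectionResolverOptions{
		OrderBy:   database.OrderBy{{Field: "name"}, {Field: "id"}},
		Ascending: true,
	}
	return graphqlutil.NewConnectionResolver[widget](&widgetConnectionStore{}, args, opts)
}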

View File

@ -3,7 +3,6 @@ package graphqlutil
import (
"context"
"fmt"
"strconv"
"testing"
"github.com/google/go-cmp/cmp"
@ -55,19 +54,14 @@ func (s *testConnectionStore) ComputeNodes(ctx context.Context, args *database.P
return nodes, nil
}
func (*testConnectionStore) MarshalCursor(n *testConnectionNode) (*string, error) {
func (*testConnectionStore) MarshalCursor(n *testConnectionNode, _ database.OrderBy) (*string, error) {
cursor := string(n.ID())
return &cursor, nil
}
func (*testConnectionStore) UnmarshalCursor(cursor string) (*int, error) {
id, err := strconv.Atoi(cursor)
if err != nil {
return nil, err
}
return &id, nil
func (*testConnectionStore) UnmarshalCursor(cursor string, _ database.OrderBy) (*string, error) {
return &cursor, nil
}
func newInt(n int) *int {
@ -116,13 +110,13 @@ func withLastPA(last int, a *database.PaginationArgs) *database.PaginationArgs {
return a
}
func withAfterPA(after int, a *database.PaginationArgs) *database.PaginationArgs {
func withAfterPA(after string, a *database.PaginationArgs) *database.PaginationArgs {
a.After = &after
return a
}
func withBeforePA(before int, a *database.PaginationArgs) *database.PaginationArgs {
func withBeforePA(before string, a *database.PaginationArgs) *database.PaginationArgs {
a.Before = &before
return a
@ -166,6 +160,14 @@ func testResolverNodesResponse(t *testing.T, resolver *ConnectionResolver[testCo
}
}
func buildPaginationArgs() *database.PaginationArgs {
args := database.PaginationArgs{
OrderBy: database.OrderBy{{Field: "id"}},
}
return &args
}
func TestConnectionNodes(t *testing.T) {
for _, test := range []struct {
name string
@ -177,30 +179,30 @@ func TestConnectionNodes(t *testing.T) {
{
name: "default",
connectionArgs: withFirstCA(5, &ConnectionResolverArgs{}),
wantPaginationArgs: withFirstPA(6, &database.PaginationArgs{}),
wantPaginationArgs: withFirstPA(6, buildPaginationArgs()),
wantNodes: 2,
},
{
name: "last arg",
wantPaginationArgs: withLastPA(6, &database.PaginationArgs{}),
wantPaginationArgs: withLastPA(6, buildPaginationArgs()),
connectionArgs: withLastCA(5, &ConnectionResolverArgs{}),
wantNodes: 2,
},
{
name: "after arg",
wantPaginationArgs: withAfterPA(0, withFirstPA(6, &database.PaginationArgs{})),
wantPaginationArgs: withAfterPA("0", withFirstPA(6, buildPaginationArgs())),
connectionArgs: withAfterCA("0", withFirstCA(5, &ConnectionResolverArgs{})),
wantNodes: 2,
},
{
name: "before arg",
wantPaginationArgs: withBeforePA(0, withLastPA(6, &database.PaginationArgs{})),
wantPaginationArgs: withBeforePA("0", withLastPA(6, buildPaginationArgs())),
connectionArgs: withBeforeCA("0", withLastCA(5, &ConnectionResolverArgs{})),
wantNodes: 2,
},
{
name: "with limit",
wantPaginationArgs: withBeforePA(0, withLastPA(2, &database.PaginationArgs{})),
wantPaginationArgs: withBeforePA("0", withLastPA(2, buildPaginationArgs())),
connectionArgs: withBeforeCA("0", withLastCA(1, &ConnectionResolverArgs{})),
wantNodes: 1,
},

View File

@ -195,7 +195,7 @@ func (s *membersConnectionStore) ComputeNodes(ctx context.Context, args *databas
return userResolvers, nil
}
func (s *membersConnectionStore) MarshalCursor(node *UserResolver) (*string, error) {
func (s *membersConnectionStore) MarshalCursor(node *UserResolver, _ database.OrderBy) (*string, error) {
if node == nil {
return nil, errors.New(`node is nil`)
}
@ -205,13 +205,13 @@ func (s *membersConnectionStore) MarshalCursor(node *UserResolver) (*string, err
return &cursor, nil
}
func (s *membersConnectionStore) UnmarshalCursor(cusror string) (*int, error) {
func (s *membersConnectionStore) UnmarshalCursor(cusror string, _ database.OrderBy) (*string, error) {
nodeID, err := UnmarshalUserID(graphql.ID(cusror))
if err != nil {
return nil, err
}
id := int(nodeID)
id := string(nodeID)
return &id, nil
}

View File

@ -2,6 +2,9 @@ package graphqlbackend
import (
"context"
"fmt"
"strconv"
"strings"
"sync"
"time"
@ -19,7 +22,6 @@ import (
)
type repositoryArgs struct {
graphqlutil.ConnectionArgs
Query *string // Search query
Names *[]string
@ -36,42 +38,17 @@ type repositoryArgs struct {
OrderBy string
Descending bool
After *string
graphqlutil.ConnectionResolverArgs
}
func (args *repositoryArgs) toReposListOptions() (database.ReposListOptions, error) {
opt := database.ReposListOptions{
OrderBy: database.RepoListOrderBy{{
Field: ToDBRepoListColumn(args.OrderBy),
Descending: args.Descending,
}},
}
opt := database.ReposListOptions{}
if args.Names != nil {
opt.Names = *args.Names
}
if args.Query != nil {
opt.Query = *args.Query
}
if args.After != nil {
cursor, err := UnmarshalRepositoryCursor(args.After)
if err != nil {
return opt, err
}
opt.Cursors = append(opt.Cursors, cursor)
} else {
cursor := types.Cursor{
Column: string(ToDBRepoListColumn(args.OrderBy)),
}
if args.Descending {
cursor.Direction = "prev"
} else {
cursor.Direction = "next"
}
opt.Cursors = append(opt.Cursors, &cursor)
}
args.Set(&opt.LimitOffset)
if args.CloneStatus != nil {
opt.CloneStatus = types.ParseCloneStatusFromGraphQL(*args.CloneStatus)
@ -114,22 +91,153 @@ func (args *repositoryArgs) toReposListOptions() (database.ReposListOptions, err
return opt, nil
}
func (r *schemaResolver) Repositories(args *repositoryArgs) (*repositoryConnectionResolver, error) {
func (r *schemaResolver) Repositories(ctx context.Context, args *repositoryArgs) (*graphqlutil.ConnectionResolver[RepositoryResolver], error) {
opt, err := args.toReposListOptions()
if err != nil {
return nil, err
}
return &repositoryConnectionResolver{
connectionStore := &repositoriesConnectionStore{
ctx: ctx,
db: r.db,
logger: r.logger.Scoped("repositoryConnectionResolver", "resolves connections to a repository"),
opt: opt,
indexed: args.Indexed,
notIndexed: args.NotIndexed,
}, nil
}
maxPageSize := 1000
// `REPOSITORY_NAME` is the enum value in the graphql schema.
orderBy := "REPOSITORY_NAME"
if args.OrderBy != "" {
orderBy = args.OrderBy
}
connectionOptions := graphqlutil.ConnectionResolverOptions{
MaxPageSize: &maxPageSize,
OrderBy: database.OrderBy{{Field: string(ToDBRepoListColumn(orderBy))}, {Field: "id"}},
Ascending: !args.Descending,
}
return graphqlutil.NewConnectionResolver[RepositoryResolver](connectionStore, &args.ConnectionResolverArgs, &connectionOptions)
}
type repositoriesConnectionStore struct {
ctx context.Context
logger log.Logger
db database.DB
opt database.ReposListOptions
indexed bool
notIndexed bool
}
func (s *repositoriesConnectionStore) MarshalCursor(node *RepositoryResolver, orderBy database.OrderBy) (*string, error) {
column := orderBy[0].Field
var value string
switch database.RepoListColumn(column) {
case database.RepoListName:
value = node.Name()
case database.RepoListCreatedAt:
value = fmt.Sprintf("'%v'", node.RawCreatedAt())
case database.RepoListSize:
size, err := node.DiskSizeBytes(s.ctx)
if err != nil {
return nil, err
}
value = strconv.FormatInt(int64(*size), 10)
default:
return nil, errors.New(fmt.Sprintf("invalid OrderBy.Field. Expected: one of (name, created_at, gr.repo_size_bytes). Actual: %s", column))
}
cursor := MarshalRepositoryCursor(
&types.Cursor{
Column: column,
Value: fmt.Sprintf("%s@%d", value, node.IDInt32()),
},
)
return &cursor, nil
}
func (s *repositoriesConnectionStore) UnmarshalCursor(cursor string, orderBy database.OrderBy) (*string, error) {
repoCursor, err := UnmarshalRepositoryCursor(&cursor)
if err != nil {
return nil, err
}
if len(orderBy) == 0 {
return nil, errors.New("no orderBy provided")
}
column := orderBy[0].Field
if repoCursor.Column != column {
return nil, errors.New(fmt.Sprintf("Invalid cursor. Expected: %s Actual: %s", column, repoCursor.Column))
}
csv := ""
values := strings.Split(repoCursor.Value, "@")
if len(values) != 2 {
return nil, errors.New(fmt.Sprintf("Invalid cursor. Expected Value: <%s>@<id> Actual Value: %s", column, repoCursor.Value))
}
switch database.RepoListColumn(column) {
case database.RepoListName:
csv = fmt.Sprintf("'%v', %v", values[0], values[1])
case database.RepoListCreatedAt:
csv = fmt.Sprintf("%v, %v", values[0], values[1])
case database.RepoListSize:
csv = fmt.Sprintf("%v, %v", values[0], values[1])
default:
return nil, errors.New("Invalid OrderBy Field.")
}
return &csv, err
}
func i32ptr(v int32) *int32 { return &v }
func (r *repositoriesConnectionStore) ComputeTotal(ctx context.Context) (countptr *int32, err error) {
// 🚨 SECURITY: Only site admins can list all repos, because a total repository
// count does not respect repository permissions.
if err := auth.CheckCurrentUserIsSiteAdmin(ctx, r.db); err != nil {
return i32ptr(int32(0)), nil
}
// Counting repositories is slow on Sourcegraph.com. Don't wait very long for an exact count.
if envvar.SourcegraphDotComMode() {
return i32ptr(int32(0)), nil
}
count, err := r.db.Repos().Count(ctx, r.opt)
return i32ptr(int32(count)), err
}
func (r *repositoriesConnectionStore) ComputeNodes(ctx context.Context, args *database.PaginationArgs) ([]*RepositoryResolver, error) {
opt := r.opt
opt.PaginationArgs = args
client := gitserver.NewClient(r.db)
repos, err := backend.NewRepos(r.logger, r.db, client).List(ctx, opt)
if err != nil {
return nil, err
}
resolvers := make([]*RepositoryResolver, 0, len(repos))
for _, repo := range repos {
resolvers = append(resolvers, NewRepositoryResolver(r.db, client, repo))
}
return resolvers, nil
}
// NOTE(naman): The old resolver `repositoryConnectionResolver` defined below is
// deprecated and replaced by `graphqlutil.ConnectionResolver` above, which implements
// proper cursor-based pagination and does not support the `precise` argument for totalCount.
// The old resolver is still used by the `AuthorizedUserRepositories` API, therefore
// the code has not been removed yet.
type TotalCountArgs struct {
Precise bool
}
@ -256,7 +364,6 @@ func (r *repositoryConnectionResolver) TotalCount(ctx context.Context, args *Tot
}()
}
i32ptr := func(v int32) *int32 { return &v }
count, err := r.db.Repos().Count(ctx, r.opt)
return i32ptr(int32(count)), err
}
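For the repositories connection specifically, `MarshalCursor` above packs the value of the active ordering column together with the repository ID into a single `<value>@<id>` string (which `MarshalRepositoryCursor` then wraps into an opaque cursor), and `UnmarshalCursor` turns it back into the comma-separated tuple the database layer compares against. The following is a stripped-down, stdlib-only sketch of that round trip; it leaves out the relay/opaque-cursor wrapping done by `MarshalRepositoryCursor`/`UnmarshalRepositoryCursor`, and the helper names are mine, not from the diff.

package main

import (
	"fmt"
	"strings"
)

// encodeRepoCursorValue mirrors what MarshalCursor packs into
// types.Cursor.Value: the active ordering column's value for the node
// plus the repository ID, joined by "@" so non-unique columns stay
// unambiguous.
func encodeRepoCursorValue(orderValue string, repoID int) string {
	return fmt.Sprintf("%s@%d", orderValue, repoID)
}

// decodeRepoCursorValue mirrors UnmarshalCursor: split the "<value>@<id>"
// pair and rebuild the comma-separated tuple the database layer compares
// against. Name values get single-quoted here; created_at values were
// already quoted when the cursor was built.
func decodeRepoCursorValue(column, value string) (string, error) {
	parts := strings.Split(value, "@")
	if len(parts) != 2 {
		return "", fmt.Errorf("invalid cursor value %q, expected <%s>@<id>", value, column)
	}
	switch column {
	case "name":
		return fmt.Sprintf("'%v', %v", parts[0], parts[1]), nil
	case "created_at", "gr.repo_size_bytes":
		return fmt.Sprintf("%v, %v", parts[0], parts[1]), nil
	default:
		return "", fmt.Errorf("unsupported order-by column %q", column)
	}
}

func main() {
	v := encodeRepoCursorValue("repo2", 2)
	fmt.Println(v) // repo2@2
	csv, _ := decodeRepoCursorValue("name", v)
	fmt.Println(csv) // 'repo2', 2
}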

View File

@ -16,18 +16,39 @@ import (
"github.com/sourcegraph/sourcegraph/internal/actor"
"github.com/sourcegraph/sourcegraph/internal/api"
"github.com/sourcegraph/sourcegraph/internal/auth"
"github.com/sourcegraph/sourcegraph/internal/database"
"github.com/sourcegraph/sourcegraph/internal/database/dbtest"
"github.com/sourcegraph/sourcegraph/internal/types"
"github.com/sourcegraph/sourcegraph/lib/errors"
)
func buildCursor(node *types.Repo) *string {
cursor := MarshalRepositoryCursor(
&types.Cursor{
Column: "name",
Value: fmt.Sprintf("%s@%d", node.Name, node.ID),
},
)
return &cursor
}
func buildCursorBySize(node *types.Repo, size int64) *string {
cursor := MarshalRepositoryCursor(
&types.Cursor{
Column: "gr.repo_size_bytes",
Value: fmt.Sprintf("%d@%d", size, node.ID),
},
)
return &cursor
}
func TestRepositoriesCloneStatusFiltering(t *testing.T) {
mockRepos := []*types.Repo{
{Name: "repo1"}, // not_cloned
{Name: "repo2"}, // cloning
{Name: "repo3"}, // cloned
{ID: 1, Name: "repo1"}, // not_cloned
{ID: 2, Name: "repo2"}, // cloning
{ID: 3, Name: "repo3"}, // cloned
}
repos := database.NewMockRepoStore()
@ -69,7 +90,7 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories {
repositories(first: 3) {
nodes { name }
totalCount
pageInfo { hasNextPage }
@ -84,18 +105,11 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
{ "name": "repo2" },
{ "name": "repo3" }
],
"totalCount": null,
"totalCount": 0,
"pageInfo": {"hasNextPage": false}
}
}
`,
ExpectedErrors: []*gqlerrors.QueryError{
{
Path: []any{"repositories", "totalCount"},
Message: auth.ErrMustBeSiteAdmin.Error(),
ResolverError: auth.ErrMustBeSiteAdmin,
},
},
},
})
})
@ -108,7 +122,7 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories {
repositories(first: 3) {
nodes { name }
totalCount
pageInfo { hasNextPage }
@ -136,7 +150,7 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
// when setting them explicitly
Query: `
{
repositories(cloned: true, notCloned: true) {
repositories(first: 3, cloned: true, notCloned: true) {
nodes { name }
totalCount
pageInfo { hasNextPage }
@ -183,7 +197,7 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories(cloned: false) {
repositories(first: 3, cloned: false) {
nodes { name }
pageInfo { hasNextPage }
}
@ -205,7 +219,7 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories(notCloned: false) {
repositories(first: 3, notCloned: false) {
nodes { name }
pageInfo { hasNextPage }
}
@ -226,7 +240,7 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories(notCloned: false, cloned: false) {
repositories(first: 3, notCloned: false, cloned: false) {
nodes { name }
pageInfo { hasNextPage }
}
@ -245,7 +259,7 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories(cloneStatus: CLONED) {
repositories(first: 3, cloneStatus: CLONED) {
nodes { name }
pageInfo { hasNextPage }
}
@ -266,7 +280,7 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories(cloneStatus: CLONING) {
repositories(first: 3, cloneStatus: CLONING) {
nodes { name }
pageInfo { hasNextPage }
}
@ -287,7 +301,7 @@ func TestRepositoriesCloneStatusFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories(cloneStatus: NOT_CLONED) {
repositories(first: 3, cloneStatus: NOT_CLONED) {
nodes { name }
pageInfo { hasNextPage }
}
@ -355,7 +369,7 @@ func TestRepositoriesIndexingFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories {
repositories(first: 5) {
nodes { name }
totalCount
pageInfo { hasNextPage }
@ -384,7 +398,7 @@ func TestRepositoriesIndexingFiltering(t *testing.T) {
// when setting them explicitly
Query: `
{
repositories(indexed: true, notIndexed: true) {
repositories(first: 5, indexed: true, notIndexed: true) {
nodes { name }
totalCount
pageInfo { hasNextPage }
@ -410,7 +424,7 @@ func TestRepositoriesIndexingFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories(indexed: false) {
repositories(first: 5, indexed: false) {
nodes { name }
totalCount
pageInfo { hasNextPage }
@ -434,7 +448,7 @@ func TestRepositoriesIndexingFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories(notIndexed: false) {
repositories(first: 5, notIndexed: false) {
nodes { name }
totalCount
pageInfo { hasNextPage }
@ -458,7 +472,7 @@ func TestRepositoriesIndexingFiltering(t *testing.T) {
Schema: schema,
Query: `
{
repositories(notIndexed: false, indexed: false) {
repositories(first: 5, notIndexed: false, indexed: false) {
nodes { name }
totalCount
pageInfo { hasNextPage }
@ -506,18 +520,18 @@ func TestRepositories_CursorPagination(t *testing.T) {
RunTest(t, &Test{
Schema: mustParseGraphQLSchema(t, db),
Query: buildQuery(1, ""),
ExpectedResult: `
ExpectedResult: fmt.Sprintf(`
{
"repositories": {
"nodes": [{
"name": "repo1"
}],
"pageInfo": {
"endCursor": "UmVwb3NpdG9yeUN1cnNvcjp7IkNvbHVtbiI6Im5hbWUiLCJWYWx1ZSI6InJlcG8yIiwiRGlyZWN0aW9uIjoibmV4dCJ9"
"endCursor": "%s"
}
}
}
`,
`, *buildCursor(mockRepos[0])),
})
})
@ -526,19 +540,19 @@ func TestRepositories_CursorPagination(t *testing.T) {
RunTest(t, &Test{
Schema: mustParseGraphQLSchema(t, db),
Query: buildQuery(1, "UmVwb3NpdG9yeUN1cnNvcjp7IkNvbHVtbiI6Im5hbWUiLCJWYWx1ZSI6InJlcG8yIiwiRGlyZWN0aW9uIjoibmV4dCJ9"),
ExpectedResult: `
Query: buildQuery(1, *buildCursor(mockRepos[0])),
ExpectedResult: fmt.Sprintf(`
{
"repositories": {
"nodes": [{
"name": "repo2"
}],
"pageInfo": {
"endCursor": "UmVwb3NpdG9yeUN1cnNvcjp7IkNvbHVtbiI6Im5hbWUiLCJWYWx1ZSI6InJlcG8zIiwiRGlyZWN0aW9uIjoibmV4dCJ9"
"endCursor": "%s"
}
}
}
`,
`, *buildCursor(mockRepos[1])),
})
})
@ -547,19 +561,19 @@ func TestRepositories_CursorPagination(t *testing.T) {
RunTest(t, &Test{
Schema: mustParseGraphQLSchema(t, db),
Query: buildQuery(1, "UmVwb3NpdG9yeUN1cnNvcjp7IkNvbHVtbiI6Im5hbWUiLCJWYWx1ZSI6InJlcG8yIiwiRGlyZWN0aW9uIjoicHJldiJ9"),
ExpectedResult: `
Query: buildQuery(1, *buildCursor(mockRepos[0])),
ExpectedResult: fmt.Sprintf(`
{
"repositories": {
"nodes": [{
"name": "repo2"
}],
"pageInfo": {
"endCursor": "UmVwb3NpdG9yeUN1cnNvcjp7IkNvbHVtbiI6Im5hbWUiLCJWYWx1ZSI6InJlcG8zIiwiRGlyZWN0aW9uIjoicHJldiJ9"
"endCursor": "%s"
}
}
}
`,
`, *buildCursor(mockRepos[1])),
})
})
@ -569,7 +583,7 @@ func TestRepositories_CursorPagination(t *testing.T) {
RunTest(t, &Test{
Schema: mustParseGraphQLSchema(t, db),
Query: buildQuery(3, ""),
ExpectedResult: `
ExpectedResult: fmt.Sprintf(`
{
"repositories": {
"nodes": [{
@ -580,11 +594,11 @@ func TestRepositories_CursorPagination(t *testing.T) {
"name": "repo3"
}],
"pageInfo": {
"endCursor": null
"endCursor": "%s"
}
}
}
`,
`, *buildCursor(mockRepos[2])),
})
})
@ -616,7 +630,12 @@ func TestRepositories_CursorPagination(t *testing.T) {
ExpectedResult: "null",
ExpectedErrors: []*gqlerrors.QueryError{
{
Path: []any{"repositories"},
Path: []any{"repositories", "nodes"},
Message: `cannot unmarshal repository cursor type: ""`,
ResolverError: errors.New(`cannot unmarshal repository cursor type: ""`),
},
{
Path: []any{"repositories", "pageInfo"},
Message: `cannot unmarshal repository cursor type: ""`,
ResolverError: errors.New(`cannot unmarshal repository cursor type: ""`),
},
@ -689,114 +708,295 @@ func TestRepositories_Integration(t *testing.T) {
ctx = actor.WithActor(ctx, actor.FromUser(admin.ID))
tests := []repositoriesQueryTest{
// no args
{
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
},
// first
{
args: "first: 2",
wantRepos: []string{"repo1", "repo2"},
wantTotalCount: 8,
args: "first: 2",
wantRepos: []string{"repo1", "repo2"},
wantTotalCount: 8,
wantNextPage: true,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[1].repo),
},
// second page with first, after args
{
args: fmt.Sprintf(`first: 2, after: "%s"`, *buildCursor(repos[0].repo)),
wantRepos: []string{"repo2", "repo3"},
wantTotalCount: 8,
wantNextPage: true,
wantPreviousPage: true,
wantStartCursor: buildCursor(repos[1].repo),
wantEndCursor: buildCursor(repos[2].repo),
},
// last page with first, after args
{
args: fmt.Sprintf(`first: 2, after: "%s"`, *buildCursor(repos[5].repo)),
wantRepos: []string{"repo7", "repo8"},
wantTotalCount: 8,
wantNextPage: false,
wantPreviousPage: true,
wantStartCursor: buildCursor(repos[6].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
// last
{
args: "last: 2",
wantRepos: []string{"repo7", "repo8"},
wantTotalCount: 8,
wantNextPage: false,
wantPreviousPage: true,
wantStartCursor: buildCursor(repos[6].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
// second last page with last, before args
{
args: fmt.Sprintf(`last: 2, before: "%s"`, *buildCursor(repos[6].repo)),
wantRepos: []string{"repo5", "repo6"},
wantTotalCount: 8,
wantNextPage: true,
wantPreviousPage: true,
wantStartCursor: buildCursor(repos[4].repo),
wantEndCursor: buildCursor(repos[5].repo),
},
// back to first page with last, before args
{
args: fmt.Sprintf(`last: 2, before: "%s"`, *buildCursor(repos[2].repo)),
wantRepos: []string{"repo1", "repo2"},
wantTotalCount: 8,
wantNextPage: true,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[1].repo),
},
// descending first
{
args: "first: 2, descending: true",
wantRepos: []string{"repo8", "repo7"},
wantTotalCount: 8,
wantNextPage: true,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[7].repo),
wantEndCursor: buildCursor(repos[6].repo),
},
// descending second page with first, after args
{
args: fmt.Sprintf(`first: 2, descending: true, after: "%s"`, *buildCursor(repos[6].repo)),
wantRepos: []string{"repo6", "repo5"},
wantTotalCount: 8,
wantNextPage: true,
wantPreviousPage: true,
wantStartCursor: buildCursor(repos[5].repo),
wantEndCursor: buildCursor(repos[4].repo),
},
// descending last page with first, after args
{
args: fmt.Sprintf(`first: 2, descending: true, after: "%s"`, *buildCursor(repos[2].repo)),
wantRepos: []string{"repo2", "repo1"},
wantTotalCount: 8,
wantNextPage: false,
wantPreviousPage: true,
wantStartCursor: buildCursor(repos[1].repo),
wantEndCursor: buildCursor(repos[0].repo),
},
// descending last
{
args: "last: 2, descending: true",
wantRepos: []string{"repo2", "repo1"},
wantTotalCount: 8,
wantNextPage: false,
wantPreviousPage: true,
wantStartCursor: buildCursor(repos[1].repo),
wantEndCursor: buildCursor(repos[0].repo),
},
// descending second last page with last, before args
{
args: fmt.Sprintf(`last: 2, descending: true, before: "%s"`, *buildCursor(repos[3].repo)),
wantRepos: []string{"repo6", "repo5"},
wantTotalCount: 8,
wantNextPage: true,
wantPreviousPage: true,
wantStartCursor: buildCursor(repos[5].repo),
wantEndCursor: buildCursor(repos[4].repo),
},
// descending back to first page with last, before args
{
args: fmt.Sprintf(`last: 2, descending: true, before: "%s"`, *buildCursor(repos[5].repo)),
wantRepos: []string{"repo8", "repo7"},
wantTotalCount: 8,
wantNextPage: true,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[7].repo),
wantEndCursor: buildCursor(repos[6].repo),
},
// cloned
{
// cloned only says whether to "Include cloned repositories.", it doesn't exclude non-cloned.
args: "cloned: true",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
args: "first: 10, cloned: true",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
{
args: "cloned: false",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4"},
wantTotalCount: 4,
args: "first: 10, cloned: false",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4"},
wantTotalCount: 4,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[3].repo),
},
{
args: "cloned: false, first: 2",
wantRepos: []string{"repo1", "repo2"},
wantTotalCount: 4,
args: "cloned: false, first: 2",
wantRepos: []string{"repo1", "repo2"},
wantTotalCount: 4,
wantNextPage: true,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[1].repo),
},
// notCloned
{
args: "notCloned: true",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
args: "first: 10, notCloned: true",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
{
args: "notCloned: false",
wantRepos: []string{"repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 4,
args: "first: 10, notCloned: false",
wantRepos: []string{"repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 4,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[4].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
// failedFetch
{
args: "failedFetch: true",
wantRepos: []string{"repo2", "repo4", "repo6"},
wantTotalCount: 3,
args: "first: 10, failedFetch: true",
wantRepos: []string{"repo2", "repo4", "repo6"},
wantTotalCount: 3,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[1].repo),
wantEndCursor: buildCursor(repos[5].repo),
},
{
args: "failedFetch: true, first: 2",
wantRepos: []string{"repo2", "repo4"},
wantTotalCount: 3,
args: "failedFetch: true, first: 2",
wantRepos: []string{"repo2", "repo4"},
wantTotalCount: 3,
wantNextPage: true,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[1].repo),
wantEndCursor: buildCursor(repos[3].repo),
},
{
args: "failedFetch: false",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
args: "first: 10, failedFetch: false",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
// cloneStatus
{
args: "cloneStatus:NOT_CLONED",
wantRepos: []string{"repo1", "repo2"},
wantTotalCount: 2,
args: "first: 10, cloneStatus:NOT_CLONED",
wantRepos: []string{"repo1", "repo2"},
wantTotalCount: 2,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[1].repo),
},
{
args: "cloneStatus:CLONING",
wantRepos: []string{"repo3", "repo4"},
wantTotalCount: 2,
args: "first: 10, cloneStatus:CLONING",
wantRepos: []string{"repo3", "repo4"},
wantTotalCount: 2,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[2].repo),
wantEndCursor: buildCursor(repos[3].repo),
},
{
args: "cloneStatus:CLONED",
wantRepos: []string{"repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 4,
args: "first: 10, cloneStatus:CLONED",
wantRepos: []string{"repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 4,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[4].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
{
args: "cloneStatus:NOT_CLONED, first: 1",
wantRepos: []string{"repo1"},
wantTotalCount: 2,
args: "cloneStatus:NOT_CLONED, first: 1",
wantRepos: []string{"repo1"},
wantTotalCount: 2,
wantNextPage: true,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[0].repo),
},
// indexed
{
// indexed only says whether to "Include indexed repositories.", it doesn't exclude non-indexed.
args: "indexed: true",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
args: "first: 10, indexed: true",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
{
args: "indexed: false",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7"},
wantTotalCount: 7,
args: "first: 10, indexed: false",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7"},
wantTotalCount: 7,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[6].repo),
},
{
args: "indexed: false, first: 2",
wantRepos: []string{"repo1", "repo2"},
wantTotalCount: 7,
args: "indexed: false, first: 2",
wantRepos: []string{"repo1", "repo2"},
wantTotalCount: 7,
wantNextPage: true,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[1].repo),
},
// notIndexed
{
args: "notIndexed: true",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
args: "first: 10, notIndexed: true",
wantRepos: []string{"repo1", "repo2", "repo3", "repo4", "repo5", "repo6", "repo7", "repo8"},
wantTotalCount: 8,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[0].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
{
args: "notIndexed: false",
wantRepos: []string{"repo8"},
wantTotalCount: 1,
args: "first: 10, notIndexed: false",
wantRepos: []string{"repo8"},
wantTotalCount: 1,
wantNextPage: false,
wantPreviousPage: false,
wantStartCursor: buildCursor(repos[7].repo),
wantEndCursor: buildCursor(repos[7].repo),
},
{
args: "orderBy:SIZE, descending:false, first: 5",
wantRepos: []string{"repo6", "repo1", "repo2", "repo3", "repo4"},
wantTotalCount: 8,
args: "orderBy:SIZE, descending:false, first: 5",
wantRepos: []string{"repo6", "repo1", "repo2", "repo3", "repo4"},
wantTotalCount: 8,
wantNextPage: true,
wantPreviousPage: false,
wantStartCursor: buildCursorBySize(repos[5].repo, repos[5].size),
wantEndCursor: buildCursorBySize(repos[3].repo, repos[3].size),
},
}
@ -809,12 +1009,13 @@ func TestRepositories_Integration(t *testing.T) {
}
type repositoriesQueryTest struct {
args string
wantRepos []string
wantNoTotalCount bool
args string
wantRepos []string
wantTotalCount int
wantEndCursor *string
wantStartCursor *string
wantNextPage bool
wantPreviousPage bool
}
func runRepositoriesQuery(t *testing.T, ctx context.Context, schema *graphql.Schema, want repositoriesQueryTest) {
@ -824,9 +1025,17 @@ func runRepositoriesQuery(t *testing.T, ctx context.Context, schema *graphql.Sch
Name string `json:"name"`
}
type pageInfo struct {
HasNextPage bool `json:"hasNextPage"`
HasPreviousPage bool `json:"hasPreviousPage"`
StartCursor *string `json:"startCursor"`
EndCursor *string `json:"endCursor"`
}
type repositories struct {
Nodes []node `json:"nodes"`
TotalCount *int `json:"totalCount"`
Nodes []node `json:"nodes"`
TotalCount *int `json:"totalCount"`
PageInfo pageInfo `json:"pageInfo"`
}
type expected struct {
@ -842,24 +1051,35 @@ func runRepositoriesQuery(t *testing.T, ctx context.Context, schema *graphql.Sch
Repositories: repositories{
Nodes: nodes,
TotalCount: &want.wantTotalCount,
PageInfo: pageInfo{
HasNextPage: want.wantNextPage,
HasPreviousPage: want.wantPreviousPage,
StartCursor: want.wantStartCursor,
EndCursor: want.wantEndCursor,
},
},
}
if want.wantNoTotalCount {
ex.Repositories.TotalCount = nil
}
marshaled, err := json.Marshal(ex)
if err != nil {
t.Fatalf("failed to marshal expected repositories query result: %s", err)
}
var query string
if want.args != "" {
query = fmt.Sprintf(`{ repositories(%s) { nodes { name } totalCount } } `, want.args)
} else {
query = `{ repositories { nodes { name } totalCount } }`
}
query := fmt.Sprintf(`
{
repositories(%s) {
nodes {
name
}
totalCount
pageInfo {
hasNextPage
hasPreviousPage
startCursor
endCursor
}
}
}`, want.args)
RunTest(t, &Test{
Context: ctx,

View File

@ -292,6 +292,14 @@ func (r *RepositoryResolver) CreatedAt() gqlutil.DateTime {
return gqlutil.DateTime{Time: time.Now()}
}
func (r *RepositoryResolver) RawCreatedAt() string {
if r.innerRepo == nil {
return ""
}
return r.innerRepo.CreatedAt.Format(time.RFC3339)
}
func (r *RepositoryResolver) UpdatedAt() *gqlutil.DateTime {
return nil
}

View File

@ -23,20 +23,17 @@ func (r *RepositoryResolver) Contributors(args *struct {
repositoryContributorsArgs
graphqlutil.ConnectionResolverArgs
}) (*graphqlutil.ConnectionResolver[repositoryContributorResolver], error) {
connectionArgs := &graphqlutil.ConnectionResolverArgs{
First: args.First,
Last: args.Last,
After: args.After,
Before: args.Before,
}
connectionStore := &repositoryContributorConnectionStore{
db: r.db,
args: &args.repositoryContributorsArgs,
connectionArgs: connectionArgs,
connectionArgs: &args.ConnectionResolverArgs,
repo: r,
}
reverse := false
return graphqlutil.NewConnectionResolver[repositoryContributorResolver](connectionStore, connectionArgs, &graphqlutil.ConnectionResolverOptions{Reverse: &reverse})
connectionOptions := graphqlutil.ConnectionResolverOptions{
Reverse: &reverse,
}
return graphqlutil.NewConnectionResolver[repositoryContributorResolver](connectionStore, &args.ConnectionResolverArgs, &connectionOptions)
}
type repositoryContributorConnectionStore struct {
@ -52,17 +49,13 @@ type repositoryContributorConnectionStore struct {
err error
}
func (s *repositoryContributorConnectionStore) MarshalCursor(node *repositoryContributorResolver) (*string, error) {
func (s *repositoryContributorConnectionStore) MarshalCursor(node *repositoryContributorResolver, _ database.OrderBy) (*string, error) {
position := strconv.Itoa(node.index)
return &position, nil
}
func (s *repositoryContributorConnectionStore) UnmarshalCursor(cursor string) (*int, error) {
position, err := strconv.Atoi(cursor)
if err != nil {
return nil, err
}
return &position, nil
func (s *repositoryContributorConnectionStore) UnmarshalCursor(cursor string, _ database.OrderBy) (*string, error) {
return &cursor, nil
}
func (s *repositoryContributorConnectionStore) ComputeTotal(ctx context.Context) (*int32, error) {
@ -123,13 +116,21 @@ func OffsetBasedCursorSlice[T any](nodes []T, args *database.PaginationArgs) ([]
totalFloat := float64(len(nodes))
if args.First != nil {
if args.After != nil {
start = int(math.Min(float64(*args.After)+1, totalFloat))
after, err := strconv.Atoi(*args.After)
if err != nil {
return nil, 0, err
}
start = int(math.Min(float64(after)+1, totalFloat))
}
end = int(math.Min(float64(start+*args.First), totalFloat))
} else if args.Last != nil {
end = int(totalFloat)
if args.Before != nil {
end = int(math.Max(float64(*args.Before), 0))
before, err := strconv.Atoi(*args.Before)
if err != nil {
return nil, 0, err
}
end = int(math.Max(float64(before), 0))
}
start = int(math.Max(float64(end-*args.Last), 0))
} else {
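With string cursors, `OffsetBasedCursorSlice` now parses the offset out of the cursor before computing the window. Below is a self-contained sketch of the same windowing arithmetic on plain values (it uses the Go 1.21 min/max builtins); the real helper takes the nodes slice and a `*database.PaginationArgs` and returns the trimmed slice plus the total. Cursors here are stringified zero-based indices, as in the contributors resolver.

package main

import (
	"fmt"
	"strconv"
)

// sliceWindow reproduces the windowing arithmetic: first/after selects a
// forward window, last/before a backward one.
func sliceWindow(total int, first *int, after *string, last *int, before *string) (int, int, error) {
	start, end := 0, 0
	switch {
	case first != nil:
		if after != nil {
			a, err := strconv.Atoi(*after)
			if err != nil {
				return 0, 0, err
			}
			start = min(a+1, total)
		}
		end = min(start+*first, total)
	case last != nil:
		end = total
		if before != nil {
			b, err := strconv.Atoi(*before)
			if err != nil {
				return 0, 0, err
			}
			end = max(b, 0)
		}
		start = max(end-*last, 0)
	default:
		return 0, 0, fmt.Errorf("either first or last must be given")
	}
	return start, end, nil
}

func main() {
	first, after := 2, "1"
	start, end, _ := sliceWindow(10, &first, &after, nil, nil)
	fmt.Println(start, end) // 2 4 -> nodes[2:4], the "next page" case in the test file below
}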

View File

@ -11,9 +11,9 @@ import (
func TestOffsetBasedCursorSlice(t *testing.T) {
slice := []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
int1 := 1
int2 := 2
int8 := 8
string1 := "1"
string8 := "8"
testCases := []struct {
name string
@ -27,7 +27,7 @@ func TestOffsetBasedCursorSlice(t *testing.T) {
},
{
"next page",
&database.PaginationArgs{First: &int2, After: &int1},
&database.PaginationArgs{First: &int2, After: &string1},
autogold.Want("first two items", []int{3, 4}),
},
{
@ -37,7 +37,7 @@ func TestOffsetBasedCursorSlice(t *testing.T) {
},
{
"previous page",
&database.PaginationArgs{Last: &int2, Before: &int8},
&database.PaginationArgs{Last: &int2, Before: &string8},
autogold.Want("first two items", []int{7, 8}),
},
}

View File

@ -2,6 +2,7 @@ package graphqlbackend
import (
"context"
"strconv"
"github.com/graph-gophers/graphql-go"
"github.com/graph-gophers/graphql-go/relay"
@ -150,19 +151,19 @@ type savedSearchesConnectionStore struct {
orgID *int32
}
func (s *savedSearchesConnectionStore) MarshalCursor(node *savedSearchResolver) (*string, error) {
func (s *savedSearchesConnectionStore) MarshalCursor(node *savedSearchResolver, _ database.OrderBy) (*string, error) {
cursor := string(node.ID())
return &cursor, nil
}
func (s *savedSearchesConnectionStore) UnmarshalCursor(cursor string) (*int, error) {
func (s *savedSearchesConnectionStore) UnmarshalCursor(cursor string, _ database.OrderBy) (*string, error) {
nodeID, err := unmarshalSavedSearchID(graphql.ID(cursor))
if err != nil {
return nil, err
}
id := int(nodeID)
id := strconv.Itoa(int(nodeID))
return &id, nil
}

View File

@ -1287,6 +1287,10 @@ type Query {
"""
first: Int
"""
Returns the last n repositories from the list.
"""
last: Int
"""
Return repositories whose names match the query.
"""
query: String
@ -1295,6 +1299,10 @@ type Query {
"""
after: String
"""
An opaque cursor that is used for pagination.
"""
before: String
"""
Return repositories whose names are in the list.
"""
names: [String!]
@ -1338,7 +1346,7 @@ type Query {
Sort direction.
"""
descending: Boolean = false
): RepositoryConnection!
): NewRepositoryConnection!
"""
Looks up a Phabricator repository by name.
@ -3077,9 +3085,30 @@ type ExternalServiceSyncJob implements Node {
"""
reposUnmodified: Int!
}
"""
A list of repositories.
The old `RepositoryConnection` is deprecated and is replaced by
this new connection, which supports proper cursor-based pagination.
The new connection does not include the `precise` argument for totalCount.
"""
type NewRepositoryConnection {
"""
A list of repositories.
"""
nodes: [Repository!]!
"""
The total count of repositories in the connection.
"""
totalCount: Int!
"""
Pagination information.
"""
pageInfo: ConnectionPageInfo!
}
"""
Deprecated! A list of repositories.
"""
type RepositoryConnection {
"""

View File

@ -2,6 +2,7 @@ package graphqlbackend
import (
"context"
"strconv"
"github.com/graph-gophers/graphql-go"
"github.com/graph-gophers/graphql-go/relay"
@ -28,7 +29,10 @@ func (s *SiteConfigurationChangeConnectionStore) ComputeNodes(ctx context.Contex
// determine next/previous page. Instead, dereference the values from args first (if
// they're non-nil) and then assign them address of the new variables.
paginationArgs := args.Clone()
isModifiedPaginationArgs := modifyArgs(paginationArgs)
isModifiedPaginationArgs, err := modifyArgs(paginationArgs)
if err != nil {
return []*SiteConfigurationChangeResolver{}, err
}
history, err := s.db.Conf().ListSiteConfigs(ctx, paginationArgs)
if err != nil {
@ -58,35 +62,45 @@ func (s *SiteConfigurationChangeConnectionStore) ComputeNodes(ctx context.Contex
return resolvers, nil
}
func (s *SiteConfigurationChangeConnectionStore) MarshalCursor(node *SiteConfigurationChangeResolver) (*string, error) {
func (s *SiteConfigurationChangeConnectionStore) MarshalCursor(node *SiteConfigurationChangeResolver, _ database.OrderBy) (*string, error) {
cursor := string(node.ID())
return &cursor, nil
}
func (s *SiteConfigurationChangeConnectionStore) UnmarshalCursor(cursor string) (*int, error) {
func (s *SiteConfigurationChangeConnectionStore) UnmarshalCursor(cursor string, _ database.OrderBy) (*string, error) {
var id int
err := relay.UnmarshalSpec(graphql.ID(cursor), &id)
return &id, err
if err != nil {
return nil, err
}
idStr := strconv.Itoa(id)
return &idStr, err
}
// modifyArgs will fetch one more than the originally requested number of items because we need one
// older item to get the diff of the oldest item in the list.
//
// A separate function so that this can be tested in isolation.
func modifyArgs(args *database.PaginationArgs) bool {
func modifyArgs(args *database.PaginationArgs) (bool, error) {
var modified bool
if args.First != nil {
*args.First += 1
modified = true
} else if args.Last != nil && args.Before != nil {
if *args.Before > 0 {
before, err := strconv.Atoi(*args.Before)
if err != nil {
return false, err
}
if before > 0 {
modified = true
*args.Last += 1
*args.Before -= 1
*args.Before = strconv.Itoa(before - 1)
}
}
return modified
return modified, nil
}
func generateResolversForFirst(history []*database.SiteConfig, db database.DB) []*SiteConfigurationChangeResolver {

View File

@ -3,6 +3,7 @@ package graphqlbackend
import (
"context"
"fmt"
"strconv"
"testing"
"github.com/google/go-cmp/cmp"
@ -19,6 +20,12 @@ type siteConfigStubs struct {
siteConfigs []*database.SiteConfig
}
func toStringPtr(n int) *string {
str := strconv.Itoa(n)
return &str
}
func setupSiteConfigStubs(t *testing.T) *siteConfigStubs {
logger := log.NoOp()
db := database.NewDB(logger, dbtest.NewDB(logger, t))
@ -176,7 +183,7 @@ func TestSiteConfigConnection(t *testing.T) {
},
{
Schema: mustParseGraphQLSchema(t, stubs.db),
Label: "Get last 2 site configuration history",
Label: "Get last 3 site configuration history",
Context: context,
Query: `
{
@ -449,7 +456,7 @@ func TestSiteConfigurationChangeConnectionStoreComputeNodes(t *testing.T) {
name: "first: 2, after: 4",
paginationArgs: &database.PaginationArgs{
First: intPtr(2),
After: intPtr(4),
After: toStringPtr(4),
},
expectedSiteConfigIDs: []int32{3, 2},
expectedPreviousSiteConfigIDs: []int32{2, 1},
@ -458,7 +465,7 @@ func TestSiteConfigurationChangeConnectionStoreComputeNodes(t *testing.T) {
name: "first: 10, after: 4",
paginationArgs: &database.PaginationArgs{
First: intPtr(10),
After: intPtr(4),
After: toStringPtr(4),
},
expectedSiteConfigIDs: []int32{3, 2, 1},
expectedPreviousSiteConfigIDs: []int32{2, 1, 0},
@ -467,7 +474,7 @@ func TestSiteConfigurationChangeConnectionStoreComputeNodes(t *testing.T) {
name: "first: 2, after: 1",
paginationArgs: &database.PaginationArgs{
First: intPtr(2),
After: intPtr(1),
After: toStringPtr(1),
},
expectedSiteConfigIDs: []int32{},
expectedPreviousSiteConfigIDs: []int32{},
@ -476,7 +483,7 @@ func TestSiteConfigurationChangeConnectionStoreComputeNodes(t *testing.T) {
name: "last: 2, before: 2",
paginationArgs: &database.PaginationArgs{
Last: intPtr(2),
Before: intPtr(2),
Before: toStringPtr(2),
},
expectedSiteConfigIDs: []int32{3, 4},
expectedPreviousSiteConfigIDs: []int32{2, 3},
@ -485,7 +492,7 @@ func TestSiteConfigurationChangeConnectionStoreComputeNodes(t *testing.T) {
name: "last: 10, before: 2",
paginationArgs: &database.PaginationArgs{
Last: intPtr(10),
Before: intPtr(2),
Before: toStringPtr(2),
},
expectedSiteConfigIDs: []int32{3, 4, 5},
expectedPreviousSiteConfigIDs: []int32{2, 3, 4},
@ -494,7 +501,7 @@ func TestSiteConfigurationChangeConnectionStoreComputeNodes(t *testing.T) {
name: "last: 2, before: 5",
paginationArgs: &database.PaginationArgs{
Last: intPtr(2),
Before: intPtr(5),
Before: toStringPtr(5),
},
expectedSiteConfigIDs: []int32{},
expectedPreviousSiteConfigIDs: []int32{},
@@ -558,8 +565,8 @@ func TestModifyArgs(t *testing.T) {
},
{
name: "first: 5, after: 10 (next page)",
args: &database.PaginationArgs{First: intPtr(5), After: intPtr(10)},
expectedArgs: &database.PaginationArgs{First: intPtr(6), After: intPtr(10)},
args: &database.PaginationArgs{First: intPtr(5), After: toStringPtr(10)},
expectedArgs: &database.PaginationArgs{First: intPtr(6), After: toStringPtr(10)},
expectedModified: true,
},
{
@@ -570,27 +577,31 @@ func TestModifyArgs(t *testing.T) {
},
{
name: "last: 5, before: 10 (previous page)",
args: &database.PaginationArgs{Last: intPtr(5), Before: intPtr(10)},
expectedArgs: &database.PaginationArgs{Last: intPtr(6), Before: intPtr(9)},
args: &database.PaginationArgs{Last: intPtr(5), Before: toStringPtr(10)},
expectedArgs: &database.PaginationArgs{Last: intPtr(6), Before: toStringPtr(9)},
expectedModified: true,
},
{
name: "last: 5, before: 1 (edge case)",
args: &database.PaginationArgs{Last: intPtr(5), Before: intPtr(1)},
expectedArgs: &database.PaginationArgs{Last: intPtr(6), Before: intPtr(0)},
args: &database.PaginationArgs{Last: intPtr(5), Before: toStringPtr(1)},
expectedArgs: &database.PaginationArgs{Last: intPtr(6), Before: toStringPtr(0)},
expectedModified: true,
},
{
name: "last: 5, before: 0 (same as last page but a mathematical edge case)",
args: &database.PaginationArgs{Last: intPtr(5), Before: intPtr(0)},
expectedArgs: &database.PaginationArgs{Last: intPtr(5), Before: intPtr(0)},
args: &database.PaginationArgs{Last: intPtr(5), Before: toStringPtr(0)},
expectedArgs: &database.PaginationArgs{Last: intPtr(5), Before: toStringPtr(0)},
expectedModified: false,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
modified := modifyArgs(tc.args)
modified, err := modifyArgs(tc.args)
if err != nil {
t.Fatal(err)
}
if modified != tc.expectedModified {
t.Errorf("Expected modified to be %v, but got %v", modified, tc.expectedModified)
}

View File

@@ -71,7 +71,7 @@ func TestSiteConfigurationHistory(t *testing.T) {
},
{
name: "last: 20 (more items than what exists in the database)",
args: &graphqlutil.ConnectionResolverArgs{Last: int32Ptr(5)},
args: &graphqlutil.ConnectionResolverArgs{Last: int32Ptr(20)},
expectedSiteConfigIDs: []int32{5, 4, 3, 2, 1},
},
{

View File

@@ -286,6 +286,7 @@ func TestGetSiteConfigCount(t *testing.T) {
func TestListSiteConfigs(t *testing.T) {
toIntPtr := func(n int) *int { return &n }
toStringPtr := func(n string) *string { return &n }
if testing.Short() {
t.Skip()
@@ -357,7 +358,7 @@ func TestListSiteConfigs(t *testing.T) {
name: "first: 2, after: 3",
listOptions: &PaginationArgs{
First: toIntPtr(2),
After: toIntPtr(3),
After: toStringPtr("3"),
},
expectedIDs: []int32{2, 1},
},
@@ -365,7 +366,7 @@ func TestListSiteConfigs(t *testing.T) {
name: "first: 5, after: 3 (overflow)",
listOptions: &PaginationArgs{
First: toIntPtr(5),
After: toIntPtr(3),
After: toStringPtr("3"),
},
expectedIDs: []int32{2, 1},
},
@@ -373,7 +374,7 @@ func TestListSiteConfigs(t *testing.T) {
name: "last: 2, after: 4",
listOptions: &PaginationArgs{
Last: toIntPtr(2),
After: toIntPtr(4),
After: toStringPtr("4"),
},
expectedIDs: []int32{1, 2},
},
@@ -381,7 +382,7 @@ func TestListSiteConfigs(t *testing.T) {
name: "last: 5, after: 4 (overflow)",
listOptions: &PaginationArgs{
Last: toIntPtr(5),
After: toIntPtr(4),
After: toStringPtr("4"),
},
expectedIDs: []int32{1, 2, 3},
},
@@ -389,7 +390,7 @@ func TestListSiteConfigs(t *testing.T) {
name: "first: 2, before: 1",
listOptions: &PaginationArgs{
First: toIntPtr(2),
Before: toIntPtr(1),
Before: toStringPtr("1"),
},
expectedIDs: []int32{4, 3},
},
@@ -397,7 +398,7 @@ func TestListSiteConfigs(t *testing.T) {
name: "first: 5, before: 1 (overflow)",
listOptions: &PaginationArgs{
First: toIntPtr(5),
Before: toIntPtr(1),
Before: toStringPtr("1"),
},
expectedIDs: []int32{4, 3, 2},
},
@@ -405,7 +406,7 @@ func TestListSiteConfigs(t *testing.T) {
name: "last: 2, before: 1",
listOptions: &PaginationArgs{
Last: toIntPtr(2),
Before: toIntPtr(1),
Before: toStringPtr("1"),
},
expectedIDs: []int32{2, 3},
},
@@ -413,7 +414,7 @@ func TestListSiteConfigs(t *testing.T) {
name: "last: 5, before: 1 (overflow)",
listOptions: &PaginationArgs{
Last: toIntPtr(5),
Before: toIntPtr(1),
Before: toStringPtr("1"),
},
expectedIDs: []int32{2, 3, 4},
},

View File

@ -1,7 +1,9 @@
package database
import (
"fmt"
"strconv"
"strings"
"github.com/graph-gophers/graphql-go"
"github.com/graph-gophers/graphql-go/relay"
@@ -77,32 +79,102 @@ func (a *QueryArgs) AppendAllToQuery(query *sqlf.Query) *sqlf.Query {
return query
}
type OrderBy []OrderByOption
func (o OrderBy) Columns() []string {
columns := []string{}
for _, orderOption := range o {
columns = append(columns, orderOption.Field)
}
return columns
}
func (o OrderBy) SQL(ascending bool) *sqlf.Query {
columns := []*sqlf.Query{}
for _, orderOption := range o {
columns = append(columns, orderOption.SQL(ascending))
}
return sqlf.Join(columns, ", ")
}
type OrderByOption struct {
Field string
Nulls string
}
func (o OrderByOption) SQL(ascending bool) *sqlf.Query {
var sb strings.Builder
sb.WriteString(o.Field)
if ascending {
sb.WriteString(" ASC")
} else {
sb.WriteString(" DESC")
}
if o.Nulls == "FIRST" || o.Nulls == "LAST" {
sb.WriteString(" NULLS " + o.Nulls)
}
return sqlf.Sprintf(sb.String())
}
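For reference, a small sketch of what the new OrderBy helpers render; the column names are illustrative, the expected output appears in the comments, and the snippet assumes it is built inside the sourcegraph repository so the internal database package is importable:

package main

import (
	"fmt"

	"github.com/keegancsmith/sqlf"

	"github.com/sourcegraph/sourcegraph/internal/database"
)

func main() {
	order := database.OrderBy{
		{Field: "gr.repo_size_bytes", Nulls: "LAST"},
		{Field: "repo.id"},
	}

	// Render a descending keyset ordering over (size, id).
	fmt.Println(order.SQL(false).Query(sqlf.PostgresBindVar))
	// gr.repo_size_bytes DESC NULLS LAST, repo.id DESC

	fmt.Println(order.Columns())
	// [gr.repo_size_bytes repo.id]
}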
type PaginationArgs struct {
First *int
Last *int
After *int
Before *int
After *string
Before *string
// TODO(naman): explain default
OrderBy OrderBy
Ascending bool
}
func (p *PaginationArgs) SQL() (*QueryArgs, error) {
queryArgs := &QueryArgs{}
var conditions []*sqlf.Query
orderBy := p.OrderBy
if len(orderBy) < 1 {
orderBy = OrderBy{{Field: "id"}}
}
orderByColumns := orderBy.Columns()
if p.After != nil {
conditions = append(conditions, sqlf.Sprintf("id < %v", p.After))
columnsStr := strings.Join(orderByColumns, ", ")
condition := fmt.Sprintf("(%s) >", columnsStr)
if !p.Ascending {
condition = fmt.Sprintf("(%s) <", columnsStr)
}
conditions = append(conditions, sqlf.Sprintf(fmt.Sprintf(condition+" (%s)", *p.After)))
}
if p.Before != nil {
conditions = append(conditions, sqlf.Sprintf("id > %v", p.Before))
columnsStr := strings.Join(orderByColumns, ", ")
condition := fmt.Sprintf("(%s) <", columnsStr)
if !p.Ascending {
condition = fmt.Sprintf("(%s) >", columnsStr)
}
conditions = append(conditions, sqlf.Sprintf(fmt.Sprintf(condition+" (%s)", *p.Before)))
}
if len(conditions) > 0 {
queryArgs.Where = sqlf.Sprintf("%v", sqlf.Join(conditions, "AND "))
}
if p.First != nil {
queryArgs.Order = sqlf.Sprintf("id DESC")
queryArgs.Order = orderBy.SQL(p.Ascending)
queryArgs.Limit = sqlf.Sprintf("LIMIT %d", *p.First)
} else if p.Last != nil {
queryArgs.Order = sqlf.Sprintf("id ASC")
queryArgs.Order = orderBy.SQL(!p.Ascending)
queryArgs.Limit = sqlf.Sprintf("LIMIT %d", *p.Last)
} else {
return nil, errors.New("First or Last must be set")
@@ -111,20 +183,21 @@ func (p *PaginationArgs) SQL() (*QueryArgs, error) {
return queryArgs, nil
}
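A companion sketch of the clauses PaginationArgs.SQL produces for a "first page after a cursor" request; the cursor value 42 is illustrative and the expected renderings are shown in the comments:

package main

import (
	"fmt"

	"github.com/keegancsmith/sqlf"

	"github.com/sourcegraph/sourcegraph/internal/database"
)

func main() {
	first := 25
	after := "42" // a string cursor, e.g. the ID produced by UnmarshalCursor

	p := &database.PaginationArgs{First: &first, After: &after}
	q, err := p.SQL()
	if err != nil {
		panic(err)
	}

	fmt.Println(q.Where.Query(sqlf.PostgresBindVar)) // (id) < (42)
	fmt.Println(q.Order.Query(sqlf.PostgresBindVar)) // id DESC
	fmt.Println(q.Limit.Query(sqlf.PostgresBindVar), q.Limit.Args()) // LIMIT $1 [25]
}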
func copyPtr[T any](n *T) *T {
if n == nil {
return nil
}
c := *n
return &c
}
// Clone (aka deepcopy) returns a new PaginationArgs object with the same values as "p".
func (p *PaginationArgs) Clone() *PaginationArgs {
copyIntPtr := func(n *int) *int {
if n == nil {
return nil
}
c := *n
return &c
}
return &PaginationArgs{
First: copyIntPtr(p.First),
Last: copyIntPtr(p.Last),
After: copyIntPtr(p.After),
Before: copyIntPtr(p.Before),
First: copyPtr[int](p.First),
Last: copyPtr[int](p.Last),
After: copyPtr[string](p.After),
Before: copyPtr[string](p.Before),
}
}

View File

@@ -739,6 +739,9 @@ type ReposListOptions struct {
// and if it doesn't end up being used this is wasted compute.
ExcludeSources bool
// cursor-based pagination args
PaginationArgs *PaginationArgs
*LimitOffset
}
@@ -930,6 +933,22 @@ func (s *repoStore) list(ctx context.Context, tr *trace.Trace, opt ReposListOpti
func (s *repoStore) listSQL(ctx context.Context, tr *trace.Trace, opt ReposListOptions) (*sqlf.Query, error) {
var ctes, joins, where []*sqlf.Query
querySuffix := sqlf.Sprintf("%s %s", opt.OrderBy.SQL(), opt.LimitOffset.SQL())
if opt.PaginationArgs != nil {
p, err := opt.PaginationArgs.SQL()
if err != nil {
return nil, err
}
if p.Where != nil {
where = append(where, p.Where)
}
querySuffix = p.AppendOrderToQuery(&sqlf.Query{})
querySuffix = p.AppendLimitToQuery(querySuffix)
}
// Cursor-based pagination requires parsing a handful of extra fields, which
// may result in additional query conditions.
if len(opt.Cursors) > 0 {
@@ -1115,7 +1134,7 @@ func (s *repoStore) listSQL(ctx context.Context, tr *trace.Trace, opt ReposListO
}
if opt.NoCloned || opt.OnlyCloned || opt.FailedFetch || opt.OnlyCorrupted || opt.joinGitserverRepos ||
opt.CloneStatus != types.CloneStatusUnknown || containsSizeField(opt.OrderBy) {
opt.CloneStatus != types.CloneStatusUnknown || containsSizeField(opt.OrderBy) || (opt.PaginationArgs != nil && containsOrderBySizeField(opt.PaginationArgs.OrderBy)) {
joins = append(joins, sqlf.Sprintf("JOIN gitserver_repos gr ON gr.repo_id = repo.id"))
}
if opt.OnlyIndexed || opt.NoIndexed {
@@ -1172,8 +1191,6 @@ func (s *repoStore) listSQL(ctx context.Context, tr *trace.Trace, opt ReposListO
queryPrefix = sqlf.Sprintf("WITH %s", sqlf.Join(ctes, ",\n"))
}
querySuffix := sqlf.Sprintf("%s %s", opt.OrderBy.SQL(), opt.LimitOffset.SQL())
columns := repoColumns
if !opt.ExcludeSources {
columns = append(columns, getSourcesByRepoQueryStr)
@@ -1210,6 +1227,15 @@ func containsSizeField(orderBy RepoListOrderBy) bool {
return false
}
func containsOrderBySizeField(orderBy OrderBy) bool {
for _, field := range orderBy {
if field.Field == string(RepoListSize) {
return true
}
}
return false
}
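Finally, a hypothetical end-to-end sketch of how the repositories page could request one keyset page ordered by size; listReposBySizePage is an invented helper, and the comma-separated cursor format is an assumption based on how PaginationArgs.SQL splices the cursor into the row comparison:

package example

import (
	"context"

	"github.com/sourcegraph/sourcegraph/internal/database"
	"github.com/sourcegraph/sourcegraph/internal/types"
)

// listReposBySizePage fetches one page of repositories ordered by size (descending),
// resuming after the given cursor when it is non-nil.
func listReposBySizePage(ctx context.Context, db database.DB, after *string) ([]*types.Repo, error) {
	first := 20
	return db.Repos().List(ctx, database.ReposListOptions{
		PaginationArgs: &database.PaginationArgs{
			First: &first,
			// after carries the ORDER BY column values of the previous page's last
			// row, comma-separated, e.g. "123456, 42" for (size, id).
			After: after,
			OrderBy: database.OrderBy{
				{Field: string(database.RepoListSize), Nulls: "LAST"},
				{Field: "repo.id"},
			},
			// Ascending is left false so the largest repositories come first.
		},
	})
}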
const userReposCTEFmtstr = `
SELECT repo_id as id FROM external_service_repos WHERE user_id = %d
`