chore(svelte): Refactor infinity query implementation (#63824)

In preparation for other work, this commit substantially refactors the
"infinity query" store implementation. The internals have been changed
completely, which allows us to simplify its public API.

- Simpler configuration, especially for merging previous and next results.
- Restoration support. Previously, pages/components had to implement
restoring the state of an infinity store on their own. Now the
restoration strategy is part of the configuration. Pages/components only
have to get an opaque snapshot via `store.capture()` and restore it via
`store.restore(snapshot)` (see the sketch after this list).
- More predictable state. It wasn't always obvious whether the store
contained stale data, e.g. while restoring. Now `data` will only be set
when the data is 'fresh'.
- Smarter 'incremental restoration' strategy. This strategy makes
multiple requests to restore the previous state. Normally these requests
are cached, so this is fast. When the data is not cached, though, there
is a noticeable delay due to waterfall requests. Now we use a simple
heuristic to determine whether the GraphQL data might be cached. If not,
we make a single request to restore the state.
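
A minimal sketch of the new configuration and restore flow (loosely based on the
commits page updated in this diff; `CommitsQuery` and `PAGE_SIZE` are placeholders):

```ts
import { getGraphQLClient, infinityQuery, IncrementalRestoreStrategy } from '$lib/graphql'

const commitsQuery = infinityQuery({
    client: getGraphQLClient(),
    query: CommitsQuery, // placeholder GraphQL document
    variables: { first: PAGE_SIZE, afterCursor: null as string | null },
    // Map the raw GraphQL result to the store's data and compute the
    // variables for the next page, if any.
    map: result => {
        const ancestors = result.data?.repository?.commit?.ancestors
        return {
            data: ancestors?.nodes,
            nextVariables: ancestors?.pageInfo.hasNextPage
                ? { afterCursor: ancestors.pageInfo.endCursor }
                : undefined,
            error: result.error,
        }
    },
    // Append each new page to the previously loaded data.
    merge: (previous, next) => (previous ?? []).concat(next ?? []),
    // Restore by replaying previous fetches, or a single larger request
    // when the data is likely not cached.
    createRestoreStrategy: api =>
        new IncrementalRestoreStrategy(
            api,
            nodes => nodes.length,
            nodes => ({ first: nodes.length })
        ),
})

// In the page component, restoring state is now just capture/restore
// (e.g. via the SvelteKit snapshot):
export const snapshot = {
    capture: () => commitsQuery.capture(),
    restore: (s: ReturnType<typeof commitsQuery.capture>) => commitsQuery.restore(s),
}
```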

For review I suggest turning off whitespace changes.

## Test plan

Manual testing, unit tests.
Felix Kling 2024-07-16 06:51:36 +02:00 committed by GitHub
parent 6a7b3bb102
commit 23616fa5c0
17 changed files with 654 additions and 651 deletions

View File

@ -2,5 +2,8 @@ const baseConfig = require('../../prettier.config.js')
module.exports = {
...baseConfig,
plugins: [...(baseConfig.plugins || []), 'prettier-plugin-svelte'],
overrides: [...(baseConfig.overrides || []), { files: '*.svelte', options: { parser: 'svelte', htmlWhitespaceSensitivity: 'strict' } }],
overrides: [
...(baseConfig.overrides || []),
{ files: '*.svelte', options: { parser: 'svelte', htmlWhitespaceSensitivity: 'strict' } },
],
}

View File

@ -1,215 +1,192 @@
import { type AnyVariables, Client, type OperationResult, CombinedError, cacheExchange } from '@urql/core'
import { test, expect, vi, beforeEach } from 'vitest'
import { describe, test, expect, vi, beforeEach, afterEach } from 'vitest'
import { pipe, filter, map, merge } from 'wonka'
import { infinityQuery } from './urql'
import { IncrementalRestoreStrategy, infinityQuery } from './urql'
function getMockClient(responses: Partial<OperationResult<any, AnyVariables>>[]): Client {
return new Client({
url: '#testingonly',
exchanges: [
cacheExchange, // This is required because infiniteQuery expects that a cache exchange is present
({ forward }) =>
operations$ => {
const mockResults$ = pipe(
operations$,
filter(operation => {
switch (operation.kind) {
case 'query':
case 'mutation':
return true
default:
return false
}
}),
map((operation): OperationResult<any, AnyVariables> => {
const response = responses.shift()
if (!response) {
describe('infinityQuery', () => {
function getMockClient(responses: Partial<OperationResult<any, AnyVariables>>[]): Client {
return new Client({
url: '#testingonly',
exchanges: [
cacheExchange, // This is required because infinityQuery expects that a cache exchange is present
({ forward }) =>
operations$ => {
const mockResults$ = pipe(
operations$,
filter(operation => {
switch (operation.kind) {
case 'query':
case 'mutation':
return true
default:
return false
}
}),
map((operation): OperationResult<any, AnyVariables> => {
const response = responses.shift()
if (!response) {
return {
operation,
error: new CombinedError({
networkError: new Error('No more responses'),
}),
stale: false,
hasNext: false,
}
}
return {
...response,
operation,
error: new CombinedError({
networkError: new Error('No more responses'),
}),
data: response.data ?? undefined,
error: response.error ?? undefined,
stale: false,
hasNext: false,
}
}
return {
...response,
operation,
data: response.data ?? undefined,
error: response.error ?? undefined,
stale: false,
hasNext: false,
}
})
)
})
)
const forward$ = pipe(
operations$,
filter(operation => {
switch (operation.kind) {
case 'query':
case 'mutation':
return false
default:
return true
}
}),
forward
)
const forward$ = pipe(
operations$,
filter(operation => {
switch (operation.kind) {
case 'query':
case 'mutation':
return false
default:
return true
}
}),
forward
)
return merge([mockResults$, forward$])
},
],
})
}
return merge([mockResults$, forward$])
},
],
})
}
function getQuery(client: Client) {
return infinityQuery({
client,
query: 'query { list { nodes { id } } pageInfo { hasNextPage, endCursor } } }',
variables: {
first: 2,
afterCursor: null as string | null,
},
nextVariables: previousResult => {
if (previousResult?.data?.list?.pageInfo?.hasNextPage) {
function getQuery(client: Client) {
return infinityQuery({
client,
query: 'query { list { nodes { id } } pageInfo { hasNextPage, endCursor } } }',
variables: {
first: 2,
afterCursor: null as string | null,
},
map: result => {
const list = result.data?.list
return {
afterCursor: previousResult.data.list.pageInfo.endCursor,
nextVariables: list?.pageInfo.hasNextPage ? { afterCursor: list.pageInfo.endCursor } : undefined,
data: list?.nodes,
error: result.error,
}
}
return undefined
},
combine: (previousResult, nextResult) => {
if (!nextResult.data?.list) {
return nextResult
}
const previousNodes = previousResult.data?.list?.nodes ?? []
const nextNodes = nextResult.data.list?.nodes ?? []
return {
...nextResult,
},
merge: (prev, next) => [...(prev ?? []), ...(next ?? [])],
createRestoreStrategy: api =>
new IncrementalRestoreStrategy(
api,
n => n.length,
n => ({ first: n.length })
),
})
}
let query: ReturnType<typeof getQuery>
beforeEach(() => {
vi.useFakeTimers()
const client = getMockClient([
{
data: {
list: {
...nextResult.data.list,
nodes: [...previousNodes, ...nextNodes],
},
},
}
},
})
}
let query: ReturnType<typeof infinityQuery>
beforeEach(() => {
vi.useFakeTimers()
const client = getMockClient([
{
data: {
list: {
nodes: [{ id: 1 }, { id: 2 }],
pageInfo: {
hasNextPage: true,
endCursor: '2',
nodes: [{ id: 1 }, { id: 2 }],
pageInfo: {
hasNextPage: true,
endCursor: '2',
},
},
},
},
},
{
data: {
list: {
nodes: [{ id: 3 }, { id: 4 }],
pageInfo: {
hasNextPage: true,
endCursor: '4',
{
data: {
list: {
nodes: [{ id: 3 }, { id: 4 }],
pageInfo: {
hasNextPage: true,
endCursor: '4',
},
},
},
},
},
{
data: {
list: {
nodes: [{ id: 5 }, { id: 6 }],
pageInfo: {
hasNextPage: false,
{
data: {
list: {
nodes: [{ id: 5 }, { id: 6 }],
pageInfo: {
hasNextPage: false,
},
},
},
},
},
])
query = getQuery(client)
})
test('fetch more', async () => {
const subscribe = vi.fn()
query.subscribe(subscribe)
await vi.runAllTimersAsync()
// 1. call: fetching -> true
// 2. call: result
expect(subscribe).toHaveBeenCalledTimes(2)
expect(subscribe.mock.calls[0][0]).toMatchObject({
fetching: true,
])
query = getQuery(client)
})
expect(subscribe.mock.calls[1][0]).toMatchObject({
fetching: false,
data: {
list: {
nodes: [{ id: 1 }, { id: 2 }],
pageInfo: {
hasNextPage: true,
endCursor: '2',
},
afterEach(() => {
vi.useRealTimers()
})
test('fetch more', async () => {
const subscribe = vi.fn()
query.subscribe(subscribe)
await vi.runAllTimersAsync()
// 1. call: fetching -> true
// 2. call: result
expect(subscribe).toHaveBeenCalledTimes(2)
expect(subscribe.mock.calls[0][0]).toMatchObject({
fetching: true,
})
expect(subscribe.mock.calls[1][0]).toMatchObject({
fetching: false,
data: [{ id: 1 }, { id: 2 }],
nextVariables: {
afterCursor: '2',
},
},
})
})
// Fetch more data
query.fetchMore()
await vi.runAllTimersAsync()
// Fetch more data
query.fetchMore()
await vi.runAllTimersAsync()
// 3. call: fetching -> true
// 4. call: result
expect(subscribe).toHaveBeenCalledTimes(4)
expect(subscribe.mock.calls[2][0]).toMatchObject({
fetching: true,
})
expect(subscribe.mock.calls[3][0]).toMatchObject({
fetching: false,
data: {
list: {
nodes: [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }],
pageInfo: {
hasNextPage: true,
endCursor: '4',
},
// 3. call: fetching -> true
// 4. call: result
expect(subscribe).toHaveBeenCalledTimes(4)
expect(subscribe.mock.calls[2][0]).toMatchObject({
fetching: true,
})
expect(subscribe.mock.calls[3][0]).toMatchObject({
fetching: false,
data: [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }],
nextVariables: {
afterCursor: '4',
},
},
})
})
test('restoring state', async () => {
const subscribe = vi.fn()
query.subscribe(subscribe)
await vi.runAllTimersAsync()
await query.restore(result => (result.data as any).list.nodes.length < 5)
expect(subscribe).toHaveBeenCalledTimes(6)
expect(subscribe.mock.calls[4][0]).toMatchObject({
restoring: true,
})
expect(subscribe.mock.calls[5][0]).toMatchObject({
restoring: false,
data: {
list: {
nodes: [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }],
pageInfo: {
hasNextPage: false,
},
},
},
})
})
test('restoring state', async () => {
const subscribe = vi.fn()
query.subscribe(subscribe)
const snapshot = query.capture()
query.restore({ ...snapshot!, count: 6 })
await vi.runAllTimersAsync()
expect(subscribe.mock.calls[3][0]).toMatchObject({
fetching: false,
data: [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }],
})
})
})

View File

@ -2,22 +2,20 @@ import {
Client,
cacheExchange,
fetchExchange,
mapExchange,
type Exchange,
makeOperation,
mapExchange,
type AnyVariables,
type OperationResult,
createRequest,
type DocumentInput,
type Exchange,
type OperationResult,
} from '@urql/core'
import type { OperationDefinitionNode } from 'graphql'
import { once } from 'lodash'
import { from, isObservable, Subject, type Observable, concat, of } from 'rxjs'
import { map, switchMap, scan, startWith } from 'rxjs/operators'
import { type Readable, readable, get } from 'svelte/store'
import { type Readable, get, writable, type Writable } from 'svelte/store'
import type { GraphQLResult } from '@sourcegraph/http-client'
import { uniqueID } from '$lib/dom'
import { GRAPHQL_URI } from '$lib/http-client'
import { getHeaders } from './shared'
@ -65,7 +63,7 @@ export function query<TData = any, TVariables extends AnyVariables = AnyVariable
return getGraphQLClient().query<TData, TVariables>(query, variables).toPromise()
}
interface InfinityQueryArgs<TData = any, TVariables extends AnyVariables = AnyVariables> {
interface InfinityQueryArgs<TData, TPayload = any, TVariables extends AnyVariables = AnyVariables, TSnapshot = any> {
/**
* The {@link Client} instance to use for the query.
*/
@ -74,76 +72,112 @@ interface InfinityQueryArgs<TData = any, TVariables extends AnyVariables = AnyVa
/**
* The GraphQL query to execute.
*/
query: DocumentInput<TData, TVariables>
query: DocumentInput<TPayload, TVariables>
/**
* The initial variables to use for the query.
*/
variables: TVariables | Observable<TVariables>
variables: TVariables | Promise<TVariables>
/**
* A function that returns the next set of variables to use for the query.
* Process the result of the query. This function maps the response to the data used
* and computes the next set of query variables, if any.
*
* @param result - The result of the query.
* @param previousResult - The previous result of the query.
*
* @remarks
* `nextVariables` is called when {@link InfinityQueryStore.fetchMore} is called to get the next set
* of variables to fetch the next page of data. This function is used to extract the cursor for the next
* page from the previous result.
* @returns The new/combined result state.
*/
nextVariables: (previousResult: OperationResult<TData, TVariables>) => Partial<TVariables> | undefined
map: (result: OperationResult<TPayload, TVariables>) => InfinityStoreResult<TData, TVariables>
/**
* A function to combine the previous result with the next result.
*
* @param previousResult - The previous result of the query.
* @param nextResult - The next result of the query.
* @returns The combined result of the query.
*
* @remarks
* `combine` is called when the next result is received to merge the previous result with the new
* result. This function is used to append the new data to the previous data.
* Optional callback to merge the data from the previous result with the new data.
* If not provided the new data will replace the old data.
*/
combine: (
previousResult: OperationResultState<TData, TVariables>,
nextResult: OperationResultState<TData, TVariables>
) => OperationResultState<TData, TVariables>
merge?: (previousData: TData | undefined, newData: TData | undefined) => TData
/**
* Returns a strategy for restoring the data when navigating back to a page.
*/
createRestoreStrategy?: (api: InfinityAPI<TData, TVariables>) => RestoreStrategy<TSnapshot, TData>
}
interface OperationResultState<TData = any, TVariables extends AnyVariables = AnyVariables>
extends OperationResult<TData, TVariables> {
/**
* Internal API for the infinity query store. Used by restore strategies to control the store.
*/
interface InfinityAPI<TData, TVariables extends AnyVariables = AnyVariables> {
/**
* The internal store representing the current state of the query.
*/
store: Writable<InfinityStoreResultState<TData, TVariables>>
/**
* Helper function for fetching and processing the next set of data.
*/
fetch(
variables: Partial<TVariables>,
previous: InfinityStoreResult<TData, TVariables>
): Promise<InfinityStoreResult<TData, TVariables>>
}
/**
* The processed/combined result of a GraphQL query.
*/
export interface InfinityStoreResult<TData = any, TVariables extends AnyVariables = AnyVariables> {
data?: TData
/**
* Set if there was an error fetching the data. When set, no more data will be fetched.
*/
error?: Error
/**
* The set of variables to use for the next fetch. If not set no more data will be fetched.
*/
nextVariables?: Partial<TVariables>
}
/**
* The state of the infinity query store.
*/
interface InfinityStoreResultState<TData = any, TVariables extends AnyVariables = AnyVariables>
extends InfinityStoreResult<TData, TVariables> {
/**
* Whether a GraphQL request is currently in flight.
*/
fetching: boolean
/**
* Whether the store is currently restoring data.
*/
restoring: boolean
}
// This needs to be exported so that TS type inference can work in SvelteKit generated files.
export interface InfinityQueryStore<TData = any, TVariables extends AnyVariables = AnyVariables>
extends Readable<OperationResultState<TData, TVariables>> {
export interface InfinityQueryStore<TData = any, TVariables extends AnyVariables = AnyVariables, TSnapshot = any>
extends Readable<InfinityStoreResultState<TData, TVariables>> {
/**
* Reruns the query with the next set of variables returned by {@link InfinityQueryArgs.nextVariables}.
* Reruns the query with the next set of query variables.
*
* @remarks
* A new query will only be executed if there is no query currently in flight and {@link InfinityQueryArgs.nextVariables}
* returns a value different from `undefined`.
* A new query will only be executed if there is no query currently in flight and {@link InfinityStoreResult.nextVariables}
* is set.
*/
fetchMore: () => void
/**
* Fetches more data until the given restoreHandler returns `false`.
*
* @param restoreHandler - A function that returns `true` if more data should be fetched.
*
* @remarks
* When navigating back to a page that was previously fetched with `infinityQuery`, the page
* should call `restore` until the previous data state is restored.
* Fetches data while the given predicate is true. Using this function is different
* from calling {@link fetchMore} in a loop, because it will set/unset the fetching state
* only once.
*/
restore: (restoreHandler: (result: OperationResultState<TData, TVariables>) => boolean) => Promise<void>
fetchWhile: (predicate: (data: TData) => boolean) => Promise<void>
/**
* Restores the data state from a snapshot, which is returned by {@link capture}.
*
* @param snapshot - The snapshot to restore.
* @returns A promise that resolves when the data has been restored.
*/
restore: (snapshot: TSnapshot | undefined) => Promise<void>
/**
* Captures the current data state to a snapshot that can be used to restore the data later.
* @returns The snapshot.
*/
capture: () => TSnapshot | undefined
}
/**
@ -157,125 +191,216 @@ export interface InfinityQueryStore<TData = any, TVariables extends AnyVariables
* with the given {@link InfinityQueryArgs.variables}.
*
* The caller can call {@link InfinityQueryStore.fetchMore} to fetch more data. The store will
* call {@link InfinityQueryArgs.nextVariables} to get the next set of variables to use for the query
* and merge it into the initial variables.
* When the result is received, the store will call {@link InfinityQueryArgs.combine} to merge the
* previous result with the new result.
* call {@link InfinityQueryArgs.map} to process the query result, combine it with the previous result
* and to compute the query variables for the next fetch, if any.
*
* Calling this function will prefetch the initial data, i.e. the data is fetched before the store is
* subscribed to.
*/
export function infinityQuery<TData = any, TVariables extends AnyVariables = AnyVariables>(
args: InfinityQueryArgs<TData, TVariables>
): InfinityQueryStore<TData, TVariables> {
// This is a hacky workaround to create an initialState. The empty object is
// invalid but the request will never be executed with these variables anyway.
const initialVariables = isObservable(args.variables) ? args.variables : of(args.variables)
const operation = args.client.createRequestOperation(
'query',
isObservable(args.variables)
? createRequest(args.query, {} as TVariables)
: createRequest(args.query, args.variables)
)
const initialState: OperationResultState<TData, TVariables> = {
operation,
error: undefined,
data: undefined,
extensions: undefined,
stale: false,
fetching: false,
restoring: false,
hasNext: false,
export function infinityQuery<
TData = any,
TPayload = any,
TVariables extends AnyVariables = AnyVariables,
TSnapshot = void
>(args: InfinityQueryArgs<TData, TPayload, TVariables, TSnapshot>): InfinityQueryStore<TData, TVariables, TSnapshot> {
const initialVariables = Promise.resolve(args.variables)
async function fetch(
variables: Partial<TVariables>,
previousResult: InfinityStoreResult<TData, TVariables>
): Promise<InfinityStoreResult<TData, TVariables>> {
const result = args.map(
await initialVariables.then(initialVariables =>
args.client.query(args.query, { ...initialVariables, ...variables })
)
)
if (args.merge) {
result.data = args.merge(previousResult.data, result.data)
}
return result
}
const nextVariables = new Subject<Partial<TVariables>>()
let shouldRestore: ((result: OperationResultState<TData, TVariables>) => boolean) | null = null
const initialState: InfinityStoreResultState<TData, TVariables> = { fetching: true }
const store = writable(initialState)
const restoreStrategy = args.createRestoreStrategy?.({ store, fetch })
// Prefetch data. We don't want to wait until the store is subscribed to. That allows us to use this function
// inside a data loader and the data will be prefetched before the component is rendered.
initialVariables.subscribe(variables => {
void args.client.query(args.query, variables).toPromise()
fetch({}, {}).then(result => {
store.update(current => {
// Only set the initial state if we haven't already started another fetch process,
// e.g. when restoring the state.
if (current === initialState) {
return { ...result, fetching: false }
}
return current
})
})
const result = readable(initialState, set => {
const subscription = initialVariables
.pipe(
switchMap(initialVariables =>
nextVariables.pipe(
startWith(initialVariables), // nextVariables will not emit until the first fetchMore is called
switchMap(variables => {
const operation = args.client.createRequestOperation(
'query',
createRequest(args.query, { ...initialVariables, ...variables })
)
return concat(
of({ fetching: true, stale: false, restoring: false }),
from(args.client.executeRequestOperation(operation).toPromise()).pipe(
map(({ data, stale, operation, error, extensions }) => ({
fetching: false,
data,
stale: !!stale,
operation,
error,
extensions,
}))
)
)
})
)
),
scan((result, update) => {
const newResult = { ...result, ...update }
return update.fetching ? newResult : args.combine(result, newResult)
}, initialState)
)
.subscribe(result => {
if (shouldRestore) {
result.restoring = Boolean(
(result.data || result.error) && shouldRestore(result) && args.nextVariables(result)
)
/**
* Resolves when the store is not fetching anymore.
*/
function waitTillReady(): Promise<void> {
let unsubscribe: () => void
return new Promise<void>(resolve => {
unsubscribe = store.subscribe(current => {
if (!current.fetching) {
resolve()
}
set(result)
})
return () => subscription.unsubscribe()
})
}).finally(() => unsubscribe())
}
return {
...result,
subscribe: store.subscribe,
fetchMore: () => {
const current = get(result)
if (current.fetching || current.restoring) {
const previous = get(store)
if (previous.fetching) {
// When a fetch is already in progress, we don't want to start another one for the same variables.
return
}
const newVariables = args.nextVariables(current)
if (!newVariables) {
return
}
nextVariables.next(newVariables)
},
restore: restoreHandler => {
shouldRestore = result => {
return Boolean((result.data || result.error) && restoreHandler(result) && args.nextVariables(result))
}
return new Promise(resolve => {
const unsubscribe = result.subscribe(result => {
if (result.fetching) {
return
}
if (result.data || result.error) {
const newVariables = args.nextVariables(result)
if (restoreHandler(result) && newVariables) {
shouldRestore = restoreHandler
nextVariables.next(newVariables)
} else {
unsubscribe()
shouldRestore = null
resolve()
if (previous.nextVariables && !previous.error) {
store.set({ ...previous, fetching: true })
fetch(previous.nextVariables, previous).then(result => {
store.update(current => {
if (previous.nextVariables === current.nextVariables) {
return { ...result, fetching: false }
}
}
return current
})
})
})
}
},
fetchWhile: async predicate => {
// We need to wait until the store is not fetching anymore to ensure that we don't start
// another fetch process while one is already in progress.
await waitTillReady()
const current = get(store)
store.set({ ...current, fetching: true })
let result: InfinityStoreResult<TData, TVariables> = current
while (!result.error && result.nextVariables && result.data && predicate(result.data)) {
result = await fetch(result.nextVariables, result)
}
store.set({ ...result, fetching: false })
},
capture: () => restoreStrategy?.capture(get(store)),
restore: snapshot => {
if (restoreStrategy && snapshot) {
return restoreStrategy.restore(snapshot)
}
return Promise.resolve()
},
}
}
/**
* A restore strategy captures and restores the data state of a query.
*/
interface RestoreStrategy<TSnapshot, TData> {
capture(result: InfinityStoreResult<TData>): TSnapshot | undefined
restore(snapshot: TSnapshot): Promise<void>
}
// This needs to be exported so that TS type inference can work in SvelteKit generated files.
export interface IncrementalRestoreStrategySnapshot<TVariables extends AnyVariables> {
count: number
variables?: Partial<TVariables>
nonce: string
}
// We use this to identify snapshots that were created in the current "session", which
// means there is a high chance that the data is still in the cache.
const NONCE = uniqueID('repeat-restore')
/**
* The incremental restore strategy captures and restores the data by counting the number of items.
* It will fetch more data until the count matches the snapshot count.
*
* This strategy is useful when every fetch returns a fixed number of items (i.e. after a cursor).
* In this case we want to make use of our GraphQL client's caching strategy and simply
* "replay" the previous fetches.
*
* This strategy works well when GraphQL requests are cached. To avoid waterfall requests in case the
* data is not cached, the strategy will fall back to requesting the data once with query variables
* from the snapshot.
*/
export class IncrementalRestoreStrategy<TData, TVariables extends AnyVariables>
implements RestoreStrategy<IncrementalRestoreStrategySnapshot<TVariables>, TData>
{
constructor(
private api: InfinityAPI<TData, TVariables>,
/**
* A function to map the data to a number. This number will be used to count the items.
*/
private mapper: (data: TData) => number,
/**
* A function to map the data to query variables. These variables will be used to fetch the data
* once if there is a chance that the data is not in the cache (fallback).
*/
private variablesMapper?: (data: TData) => Partial<TVariables>
) {}
public capture(result: InfinityStoreResult<TData>): IncrementalRestoreStrategySnapshot<TVariables> | undefined {
return result.data
? {
count: this.mapper(result.data),
variables: this.variablesMapper ? this.variablesMapper(result.data) : undefined,
nonce: NONCE,
}
: undefined
}
public async restore(snapshot: IncrementalRestoreStrategySnapshot<TVariables>): Promise<void> {
this.api.store.set({ fetching: true })
const result = await (snapshot.nonce !== NONCE && snapshot.variables
? this.api.fetch(snapshot.variables, {})
: this.fetch(snapshot))
this.api.store.set({ ...result, fetching: false })
}
private async fetch(
snapshot: IncrementalRestoreStrategySnapshot<TVariables>
): Promise<InfinityStoreResult<TData, TVariables>> {
let current: InfinityStoreResult<TData, TVariables> = { nextVariables: {} }
while (current.nextVariables && ((current.data && this.mapper(current.data)) || 0) < snapshot.count) {
current = await this.api.fetch(current.nextVariables, current)
if (current.error || !current.data) {
break
}
}
return current
}
}
/**
* A restore strategy that overwrites the current store state with the response of a new query.
* The strategy uses the query variables from the snapshot to fetch the data.
*/
export class OverwriteRestoreStrategy<TData, TVariables extends AnyVariables>
implements RestoreStrategy<{ variables: Partial<TVariables> }, TData>
{
constructor(
private api: InfinityAPI<TData, TVariables>,
private variablesMapper: (data: TData) => Partial<TVariables>
) {}
capture(result: InfinityStoreResult<TData, TVariables>): { variables: Partial<TVariables> } | undefined {
if (!result.data) {
return undefined
}
const variables = this.variablesMapper(result.data)
return variables ? { variables } : undefined
}
async restore(snapshot: { variables: Partial<TVariables> }): Promise<void> {
this.api.store.set({ fetching: true })
const result = await this.api.fetch(snapshot.variables, {})
this.api.store.set({ ...result, fetching: false })
}
}

View File

@ -2,12 +2,15 @@
import { createHistoryResults } from '$testing/testdata'
import { Story } from '@storybook/addon-svelte-csf'
import HistoryPanel from './HistoryPanel.svelte'
import { readable } from 'svelte/store'
export const meta = {
component: HistoryPanel,
parameters: {
sveltekit_experimental: {
stores: {
page: {},
page: {
url: new URL(window.location.href),
},
},
},
},
@ -17,12 +20,19 @@
<script lang="ts">
let commitCount = 5
$: [initial] = createHistoryResults(1, commitCount)
$: store = {
...readable({ data: initial.nodes, fetching: false }),
fetchMore: () => {},
fetchWhile: () => Promise.resolve(),
capture: () => undefined,
restore: () => Promise.resolve(),
}
</script>
<Story name="Default">
<p>Commits to show: <input type="number" bind:value={commitCount} min="1" max="100" /></p>
<hr />
{#key commitCount}
<HistoryPanel history={initial} enableInlineDiffs={false} fetchMore={() => {}} />
<HistoryPanel history={store} enableInlineDiff={false} />
{/key}
</Story>

View File

@ -1,76 +1,64 @@
<script lang="ts" context="module">
type HistoryStore = InfinityQueryStore<HistoryPanel_HistoryConnection['nodes'], { afterCursor: string | null }>
export interface Capture {
history: ReturnType<HistoryStore['capture']>
scroller?: ScrollerCapture
}
</script>
<script lang="ts">
import { tick } from 'svelte'
import { page } from '$app/stores'
import Avatar from '$lib/Avatar.svelte'
import { SourcegraphURL } from '$lib/common'
import { scrollIntoViewOnMount } from '$lib/dom'
import type { InfinityQueryStore } from '$lib/graphql'
import Icon from '$lib/Icon.svelte'
import LoadingSpinner from '$lib/LoadingSpinner.svelte'
import Scroller, { type Capture as ScrollerCapture } from '$lib/Scroller.svelte'
import { replaceRevisionInURL } from '$lib/shared'
import Timestamp from '$lib/Timestamp.svelte'
import Tooltip from '$lib/Tooltip.svelte'
import { Badge } from '$lib/wildcard'
import { Alert, Badge } from '$lib/wildcard'
import type { HistoryPanel_HistoryConnection } from './HistoryPanel.gql'
export let history: HistoryPanel_HistoryConnection | null
export let fetchMore: (afterCursor: string | null) => void
export let loading: boolean = false
export let history: HistoryStore
export let enableInlineDiff: boolean = false
export let enableViewAtCommit: boolean = false
export function capture(): Capture {
return {
history: history.capture(),
scroller: scroller?.capture(),
}
}
export async function restore(data: Capture) {
await history.restore(data.history)
// If the selected revision is not in the set of currently loaded commits, load more
if (selectedRev) {
await history.fetchWhile(data => !data.find(commit => selectedRev?.startsWith(commit.abbreviatedOID)))
}
if (data.scroller) {
// Wait until the DOM was updated before updating the scroll position
await tick()
// restore might be called when the history panel is closed
// in which case scroller doesn't exist
scroller?.restore(data.scroller)
}
}
function loadMore() {
if (history?.pageInfo.hasNextPage) {
fetchMore(history.pageInfo.endCursor)
}
}
let scroller: Scroller
// If the selected revision is not in the set of currently loaded commits, load more
$: if (
selectedRev &&
history &&
history.nodes.length > 0 &&
!history.nodes.some(commit => commit.abbreviatedOID === selectedRev) &&
history.pageInfo.hasNextPage
) {
loadMore()
}
$: selectedRev = $page.url?.searchParams.get('rev')
$: diffEnabled = $page.url?.searchParams.has('diff')
$: closeURL = SourcegraphURL.from($page.url).deleteSearchParameter('rev', 'diff').toString()
</script>
<Scroller bind:this={scroller} margin={200} on:more={loadMore}>
{#if history}
<Scroller bind:this={scroller} margin={200} on:more={history.fetchMore}>
{#if $history.data}
<table>
{#each history.nodes as commit (commit.id)}
{#each $history.data as commit (commit.id)}
{@const selected = commit.abbreviatedOID === selectedRev || commit.oid === selectedRev}
<tr class:selected use:scrollIntoViewOnMount={selected}>
<td>
@ -111,12 +99,22 @@
{/each}
</table>
{/if}
{#if !history || loading}
<LoadingSpinner />
{#if $history.fetching}
<div class="info">
<LoadingSpinner />
</div>
{:else if $history.error}
<div class="info">
<Alert variant="danger">Unable to load history: {$history.error.message}</Alert>
</div>
{/if}
</Scroller>
<style lang="scss">
.info {
padding: 0.5rem 1rem;
}
table {
width: 100%;
max-width: 100%;

View File

@ -200,7 +200,8 @@
// When a toggle is unset, we revert back to the default pattern type. However, if the default pattern type
// is regexp, we should revert to keyword instead (otherwise it's not possible to disable the toggle).
function getUnselectedPatternType(): SearchPatternType {
const defaultPatternType = ($settings?.['search.defaultPatternType'] as SearchPatternType) ?? SearchPatternType.keyword
const defaultPatternType =
($settings?.['search.defaultPatternType'] as SearchPatternType) ?? SearchPatternType.keyword
return defaultPatternType === SearchPatternType.regexp ? SearchPatternType.keyword : defaultPatternType
}

View File

@ -63,7 +63,6 @@
import type { LayoutData, Snapshot } from './$types'
import FileTree from './FileTree.svelte'
import { createFileTreeStore } from './fileTreeStore'
import type { GitHistory_HistoryConnection, RepoPage_ReferencesLocationConnection } from './layout.gql'
import ReferencePanel from './ReferencePanel.svelte'
export let data: LayoutData
@ -91,28 +90,15 @@
let fileTreeSidePanel: Panel
let historyPanel: HistoryPanel
let selectedTab: number | null = null
let commitHistory: GitHistory_HistoryConnection | null
let references: RepoPage_ReferencesLocationConnection | null
const fileTreeStore = createFileTreeStore({ fetchFileTreeData: fetchSidebarFileTree })
$: ({ revision = '', parentPath, repoName, resolvedRevision, isCodyAvailable } = data)
$: fileTreeStore.set({ repoName, revision: resolvedRevision.commitID, path: parentPath })
$: commitHistoryQuery = data.commitHistory
$: if (!!commitHistoryQuery) {
// Reset commit history when the query observable changes. Without
// this we are showing the commit history of the previously selected
// file/folder until the new commit history is loaded.
commitHistory = null
}
$: commitHistory = $commitHistoryQuery?.data?.repository?.commit?.ancestors ?? null
// The observable query to fetch references (due to infinite scrolling)
$: sgURL = SourcegraphURL.from($page.url)
$: selectedLine = sgURL.lineRange
$: referenceQuery =
sgURL.viewState === 'references' && selectedLine?.line ? data.getReferenceStore(selectedLine) : null
$: references = $referenceQuery?.data?.repository?.commit?.blob?.lsif?.references ?? null
afterNavigate(async () => {
// We need to wait for referenceQuery to be updated before checking its state
@ -289,32 +275,22 @@
{#key data.filePath}
<HistoryPanel
bind:this={historyPanel}
history={commitHistory}
loading={$commitHistoryQuery?.fetching ?? true}
fetchMore={commitHistoryQuery.fetchMore}
history={data.commitHistory}
enableInlineDiff={$page.data.enableInlineDiff}
enableViewAtCommit={$page.data.enableViewAtCommit}
/>
{/key}
</TabPanel>
<TabPanel title="References" shortcut={referenceHotkey}>
{#if !referenceQuery}
{#if referenceQuery}
<ReferencePanel references={referenceQuery} />
{:else}
<div class="info">
<Alert variant="info"
>Hover over a symbol and click "Find references" to find references to the
symbol.</Alert
>
</div>
{:else if $referenceQuery && !$referenceQuery.fetching && (!references || references.nodes.length === 0)}
<div class="info">
<Alert variant="info">No references found.</Alert>
</div>
{:else}
<ReferencePanel
connection={references}
loading={$referenceQuery?.fetching ?? false}
on:more={referenceQuery?.fetchMore}
/>
{/if}
</TabPanel>
</Tabs>

View File

@ -1,11 +1,10 @@
import { dirname } from 'path'
import { from } from 'rxjs'
import { readable, derived, type Readable } from 'svelte/store'
import { CodyContextFiltersSchema, getFiltersFromCodyContextFilters } from '$lib/cody/config'
import type { LineOrPositionOrRange } from '$lib/common'
import { getGraphQLClient, infinityQuery, type GraphQLClient } from '$lib/graphql'
import { getGraphQLClient, infinityQuery, type GraphQLClient, IncrementalRestoreStrategy } from '$lib/graphql'
import { ROOT_PATH, fetchSidebarFileTree } from '$lib/repo/api/tree'
import { resolveRevision } from '$lib/repo/utils'
import { parseRepoRevision } from '$lib/shared'
@ -54,45 +53,30 @@ export const load: LayoutLoad = async ({ parent, params }) => {
commitHistory: infinityQuery({
client,
query: GitHistoryQuery,
variables: from(
resolvedRevision.then(revspec => ({
repoName,
revspec,
filePath,
first: HISTORY_COMMITS_PER_PAGE,
afterCursor: null as string | null,
}))
),
nextVariables: previousResult => {
if (previousResult?.data?.repository?.commit?.ancestors?.pageInfo?.hasNextPage) {
return {
afterCursor: previousResult.data.repository.commit.ancestors.pageInfo.endCursor,
}
}
return undefined
},
combine: (previousResult, nextResult) => {
if (!nextResult.data?.repository?.commit) {
return nextResult
}
const previousNodes = previousResult.data?.repository?.commit?.ancestors?.nodes ?? []
const nextNodes = nextResult.data.repository?.commit?.ancestors.nodes ?? []
variables: resolvedRevision.then(revspec => ({
repoName,
revspec,
filePath,
first: HISTORY_COMMITS_PER_PAGE,
afterCursor: null as string | null,
})),
map: result => {
const ancestors = result.data?.repository?.commit?.ancestors
return {
...nextResult,
data: {
repository: {
...nextResult.data.repository,
commit: {
...nextResult.data.repository.commit,
ancestors: {
...nextResult.data.repository.commit.ancestors,
nodes: [...previousNodes, ...nextNodes],
},
},
},
},
nextVariables: ancestors?.pageInfo.hasNextPage
? { afterCursor: ancestors.pageInfo.endCursor }
: undefined,
data: ancestors?.nodes,
error: result.error,
}
},
merge: (previous, next) => (previous ?? []).concat(next ?? []),
createRestoreStrategy: api =>
new IncrementalRestoreStrategy(
api,
n => n.length,
n => ({ first: n.length })
),
}),
// We are not extracting the selected position from the URL because that creates a dependency
@ -101,56 +85,27 @@ export const load: LayoutLoad = async ({ parent, params }) => {
infinityQuery({
client,
query: RepoPage_PreciseCodeIntel,
variables: from(
resolvedRevision.then(revspec => ({
repoName,
revspec,
filePath,
first: REFERENCES_PER_PAGE,
// Line and character are 1-indexed, but the API expects 0-indexed
line: lineOrPosition.line - 1,
character: lineOrPosition.character! - 1,
afterCursor: null as string | null,
}))
),
nextVariables: previousResult => {
if (previousResult?.data?.repository?.commit?.blob?.lsif?.references.pageInfo.hasNextPage) {
return {
afterCursor: previousResult.data.repository.commit.blob.lsif.references.pageInfo.endCursor,
}
}
return undefined
},
combine: (previousResult, nextResult) => {
if (!nextResult.data?.repository?.commit?.blob?.lsif) {
return nextResult
}
const previousNodes = previousResult.data?.repository?.commit?.blob?.lsif?.references?.nodes ?? []
const nextNodes = nextResult.data?.repository?.commit?.blob?.lsif?.references?.nodes ?? []
variables: resolvedRevision.then(revspec => ({
repoName,
revspec,
filePath,
first: REFERENCES_PER_PAGE,
// Line and character are 1-indexed, but the API expects 0-indexed
line: lineOrPosition.line - 1,
character: lineOrPosition.character! - 1,
afterCursor: null as string | null,
})),
map: result => {
const references = result.data?.repository?.commit?.blob?.lsif?.references
return {
...nextResult,
data: {
repository: {
...nextResult.data.repository,
commit: {
...nextResult.data.repository.commit,
blob: {
...nextResult.data.repository.commit.blob,
lsif: {
...nextResult.data.repository.commit.blob.lsif,
references: {
...nextResult.data.repository.commit.blob.lsif.references,
nodes: [...previousNodes, ...nextNodes],
},
},
},
},
},
},
nextVariables: references?.pageInfo.hasNextPage
? { afterCursor: references.pageInfo.endCursor }
: undefined,
data: references?.nodes,
error: result.error,
}
},
merge: (previous, next) => (previous ?? []).concat(next ?? []),
}),
}
}

View File

@ -1,8 +1,10 @@
<script lang="ts">
import { SourcegraphURL } from '$lib/common'
import type { InfinityQueryStore } from '$lib/graphql'
import LoadingSpinner from '$lib/LoadingSpinner.svelte'
import Scroller from '$lib/Scroller.svelte'
import Tooltip from '$lib/Tooltip.svelte'
import { Alert } from '$lib/wildcard'
import Panel from '$lib/wildcard/resizable-panel/Panel.svelte'
import PanelGroup from '$lib/wildcard/resizable-panel/PanelGroup.svelte'
import PanelResizeHandle from '$lib/wildcard/resizable-panel/PanelResizeHandle.svelte'
@ -11,8 +13,7 @@
import type { ReferencePanel_LocationConnection, ReferencePanel_Location } from './ReferencePanel.gql'
import ReferencePanelCodeExcerpt from './ReferencePanelCodeExcerpt.svelte'
export let connection: ReferencePanel_LocationConnection | null
export let loading: boolean
export let references: InfinityQueryStore<ReferencePanel_LocationConnection['nodes']>
// It appears that the backend returns duplicate locations. We need to filter them out.
function unique(locations: ReferencePanel_Location[]): ReferencePanel_Location[] {
@ -41,13 +42,18 @@
let selectedLocation: ReferencePanel_Location | null = null
$: previewURL = selectedLocation ? getPreviewURL(selectedLocation) : null
$: locations = connection ? unique(connection.nodes) : []
$: locations = $references.data ? unique($references.data) : []
</script>
<div class="root">
<PanelGroup id="references">
<Panel id="references-list">
<Scroller margin={600} on:more>
<Scroller margin={600} on:more={references.fetchMore}>
{#if !$references.fetching && !$references.error && locations.length === 0}
<div class="info">
<Alert variant="info">No references found.</Alert>
</div>
{/if}
<ul>
{#each locations as location (location.canonicalURL)}
{@const selected = selectedLocation?.canonicalURL === location.canonicalURL}
@ -76,8 +82,12 @@
</li>
{/each}
</ul>
{#if loading}
{#if $references.fetching}
<div class="loader"><LoadingSpinner center /></div>
{:else if $references.error}
<div class="loader">
<Alert variant="danger">Unable to load references: {$references.error.message}</Alert>
</div>
{/if}
</Scroller>
</Panel>

View File

@ -13,26 +13,25 @@
import RepositoryRevPicker from '../../../RepositoryRevPicker.svelte'
import type { PageData, Snapshot } from './$types'
import type { CommitsPage_GitCommitConnection } from './page.gql'
export let data: PageData
// This tracks the number of commits that have been loaded and the current scroll
// position, so both can be restored when the user refreshes the page or navigates
// back to it.
export const snapshot: Snapshot<{ commitCount: number; scroller: ScrollerCapture }> = {
export const snapshot: Snapshot<{
commits: ReturnType<typeof data.commitsQuery.capture>
scroller: ScrollerCapture
}> = {
capture() {
return {
commitCount: commits?.nodes.length ?? 0,
commits: commitsQuery.capture(),
scroller: scroller.capture(),
}
},
async restore(snapshot) {
if (snapshot?.commitCount !== undefined && get(navigating)?.type === 'popstate') {
await commitsQuery?.restore(result => {
const count = result.data?.repository?.commit?.ancestors.nodes?.length
return !!count && count < snapshot.commitCount
})
if (get(navigating)?.type === 'popstate') {
await commitsQuery?.restore(snapshot.commits)
}
scroller.restore(snapshot.scroller)
},
@ -43,14 +42,9 @@
}
let scroller: Scroller
let commits: CommitsPage_GitCommitConnection | null = null
$: commitsQuery = data.commitsQuery
// We conditionally check for the ancestors field to be able to show
// previously loaded commits when an error occurs while fetching more commits.
$: if ($commitsQuery?.data?.repository?.commit?.ancestors) {
commits = $commitsQuery.data.repository.commit.ancestors
}
$: commits = $commitsQuery.data
$: pageTitle = (() => {
const parts = ['Commits']
if (data.path) {
@ -86,9 +80,9 @@
</header>
<section>
<Scroller bind:this={scroller} margin={600} on:more={fetchMore}>
{#if !$commitsQuery.restoring && commits}
{#if commits}
<ul class="commits">
{#each commits.nodes as commit (commit.canonicalURL)}
{#each commits as commit (commit.canonicalURL)}
<li>
<div class="commit">
<Commit {commit} />
@ -122,7 +116,7 @@
{/each}
</ul>
{/if}
{#if $commitsQuery.fetching || $commitsQuery.restoring}
{#if $commitsQuery.fetching}
<div class="footer">
<LoadingSpinner />
</div>

View File

@ -1,6 +1,4 @@
import { from } from 'rxjs'
import { getGraphQLClient, infinityQuery } from '$lib/graphql'
import { IncrementalRestoreStrategy, getGraphQLClient, infinityQuery } from '$lib/graphql'
import { resolveRevision } from '$lib/repo/utils'
import { parseRepoRevision } from '$lib/shared'
@ -18,45 +16,31 @@ export const load: PageLoad = ({ parent, params }) => {
const commitsQuery = infinityQuery({
client,
query: CommitsPage_CommitsQuery,
variables: from(
resolvedRevision.then(revision => ({
repoName,
revision,
first: PAGE_SIZE,
path,
afterCursor: null as string | null,
}))
),
nextVariables: previousResult => {
if (previousResult?.data?.repository?.commit?.ancestors?.pageInfo?.hasNextPage) {
return {
afterCursor: previousResult.data.repository.commit.ancestors.pageInfo.endCursor,
}
}
return undefined
},
combine: (previousResult, nextResult) => {
if (!nextResult.data?.repository?.commit) {
return nextResult
}
const previousNodes = previousResult.data?.repository?.commit?.ancestors?.nodes ?? []
const nextNodes = nextResult.data.repository?.commit?.ancestors.nodes ?? []
variables: resolvedRevision.then(revision => ({
repoName,
revision,
first: PAGE_SIZE,
path,
afterCursor: null as string | null,
})),
map: result => {
const ancestors = result.data?.repository?.commit?.ancestors
return {
...nextResult,
data: {
repository: {
...nextResult.data.repository,
commit: {
...nextResult.data.repository.commit,
ancestors: {
...nextResult.data.repository.commit.ancestors,
nodes: [...previousNodes, ...nextNodes],
},
},
},
},
nextVariables:
ancestors?.pageInfo?.endCursor && ancestors.pageInfo.hasNextPage
? { afterCursor: ancestors.pageInfo.endCursor }
: undefined,
data: ancestors?.nodes,
error: result.error,
}
},
merge: (previous, next) => (previous ?? []).concat(next ?? []),
createRestoreStrategy: api =>
new IncrementalRestoreStrategy(
api,
n => n.length,
n => ({ first: n.length })
),
})
return {

View File

@ -8,39 +8,34 @@
import GitReference from '$lib/repo/GitReference.svelte'
import Scroller, { type Capture as ScrollerCapture } from '$lib/Scroller.svelte'
import { Alert, Button, Input } from '$lib/wildcard'
import type { GitBranchesConnection } from '$testing/graphql-type-mocks'
import type { PageData, Snapshot } from './$types'
export let data: PageData
export const snapshot: Snapshot<{ count: number; scroller: ScrollerCapture }> = {
export const snapshot: Snapshot<{
branches: ReturnType<typeof data.branchesQuery.capture>
scroller: ScrollerCapture
}> = {
capture() {
return {
count: branchesConnection?.nodes.length ?? 0,
branches: data.branchesQuery.capture(),
scroller: scroller.capture(),
}
},
async restore(snapshot) {
if (snapshot?.count && get(navigating)?.type === 'popstate') {
await branchesQuery?.restore(result => {
const count = result.data?.repository?.branches?.nodes?.length
return !!count && count < snapshot.count
})
if (get(navigating)?.type === 'popstate') {
await data.branchesQuery?.restore(snapshot.branches)
}
scroller.restore(snapshot.scroller)
},
}
let scroller: Scroller
let branchesConnection: GitBranchesConnection | undefined
$: query = data.query
$: branchesQuery = data.branchesQuery
$: branchesConnection = $branchesQuery.data?.repository?.branches ?? branchesConnection
$: if (branchesQuery) {
branchesConnection = undefined
}
$: branches = $branchesQuery.data
</script>
<svelte:head>
@ -53,15 +48,15 @@
<Button variant="primary" type="submit">Search</Button>
</form>
<Scroller bind:this={scroller} margin={600} on:more={branchesQuery.fetchMore}>
{#if !$branchesQuery.restoring && branchesConnection}
{#if branches}
<table>
<tbody>
{#each branchesConnection.nodes as tag (tag)}
<GitReference ref={tag} />
{#each branches.nodes as branch (branch)}
<GitReference ref={branch} />
{:else}
<tr>
<td colspan="2">
<Alert variant="info">No tags found</Alert>
<Alert variant="info">No branches found</Alert>
</td>
</tr>
{/each}
@ -69,7 +64,7 @@
</table>
{/if}
<div>
{#if $branchesQuery.fetching || $branchesQuery.restoring}
{#if $branchesQuery.fetching}
<LoadingSpinner />
{:else if $branchesQuery.error}
<Alert variant="danger">
@ -78,12 +73,12 @@
{/if}
</div>
</Scroller>
{#if branchesConnection && branchesConnection.nodes.length > 0}
{#if branches && branches.nodes.length > 0}
<div class="footer">
{branchesConnection.totalCount}
{pluralize('branch', branchesConnection.totalCount)} total
{#if branchesConnection.totalCount > branchesConnection.nodes.length}
(showing {branchesConnection.nodes.length})
{branches.totalCount}
{pluralize('branch', branches.totalCount)} total
{#if branches.totalCount > branches.nodes.length}
(showing {branches.nodes.length})
{/if}
</div>
{/if}

View File

@ -1,4 +1,4 @@
import { getGraphQLClient, infinityQuery } from '$lib/graphql'
import { getGraphQLClient, infinityQuery, OverwriteRestoreStrategy } from '$lib/graphql'
import { parseRepoRevision } from '$lib/shared'
import type { PageLoad } from './$types'
@ -22,17 +22,22 @@ export const load: PageLoad = ({ params, url }) => {
withBehindAhead: true,
query,
},
nextVariables: previousResult => {
if (previousResult?.data?.repository?.branches?.pageInfo?.hasNextPage) {
return {
first: previousResult.data.repository.branches.nodes.length + PAGE_SIZE,
}
map: result => {
const branches = result.data?.repository?.branches
return {
nextVariables: branches?.pageInfo.hasNextPage
? { first: branches.nodes.length + PAGE_SIZE }
: undefined,
data: branches
? {
nodes: branches.nodes,
totalCount: branches.totalCount,
}
: undefined,
error: result.error,
}
return undefined
},
combine: (_previousResult, nextResult) => {
return nextResult
},
createRestoreStrategy: api => new OverwriteRestoreStrategy(api, data => ({ first: data.nodes.length })),
}),
}
}

View File

@ -21,7 +21,7 @@
interface Capture {
scroll: ScrollerCapture
diffCount: number
diffs?: ReturnType<NonNullable<typeof data.diff>['capture']>
expandedDiffs: Array<[number, boolean]>
}
@ -30,16 +30,13 @@
export const snapshot: Snapshot<Capture> = {
capture: () => ({
scroll: scroller.capture(),
diffCount: diffs?.nodes.length ?? 0,
diffs: diffQuery?.capture(),
expandedDiffs: Array.from(expandedDiffs.entries()),
}),
restore: async capture => {
expandedDiffs = new Map(capture.expandedDiffs)
if (capture?.diffCount !== undefined && get(navigating)?.type === 'popstate') {
await data.diff?.restore(result => {
const count = result.data?.repository?.comparison.fileDiffs.nodes.length
return !!count && count < capture.diffCount
})
if (get(navigating)?.type === 'popstate') {
await data.diff?.restore(capture.diffs)
}
scroller.restore(capture.scroll)
},
@ -50,7 +47,6 @@
let expandedDiffs = new Map<number, boolean>()
$: diffQuery = data.diff
$: diffs = $diffQuery?.data?.repository?.comparison.fileDiffs ?? null
afterNavigate(() => {
repositoryContext.set({ revision: data.commit.abbreviatedOID })
@ -66,7 +62,7 @@
<section>
{#if data.commit}
<Scroller bind:this={scroller} margin={600} on:more={data.diff?.fetchMore}>
<Scroller bind:this={scroller} margin={600} on:more={diffQuery?.fetchMore}>
<div class="header">
<div class="info"><Commit commit={data.commit} alwaysExpanded /></div>
<div class="parents">
@ -104,9 +100,9 @@
</div>
</div>
<hr />
{#if !$diffQuery?.restoring && diffs}
{#if $diffQuery?.data}
<ul class="diffs">
{#each diffs.nodes as node, index}
{#each $diffQuery.data as node, index}
<li>
<FileDiff
fileDiff={node}
@ -117,7 +113,7 @@
{/each}
</ul>
{/if}
{#if $diffQuery?.fetching || $diffQuery?.restoring}
{#if $diffQuery?.fetching}
<LoadingSpinner />
{:else if $diffQuery?.error}
<div class="error">

View File

@ -1,6 +1,6 @@
import { error } from '@sveltejs/kit'
import { getGraphQLClient, infinityQuery } from '$lib/graphql'
import { IncrementalRestoreStrategy, getGraphQLClient, infinityQuery } from '$lib/graphql'
import { parseRepoRevision } from '$lib/shared'
import type { PageLoad } from './$types'
@ -38,44 +38,21 @@ export const load: PageLoad = async ({ params }) => {
first: PAGE_SIZE,
after: null as string | null,
},
nextVariables: previousResult => {
if (
!previousResult.error &&
previousResult?.data?.repository?.comparison?.fileDiffs?.pageInfo?.hasNextPage
) {
return {
after: previousResult.data.repository.comparison.fileDiffs.pageInfo.endCursor,
}
}
return undefined
},
combine: (previousResult, nextResult) => {
if (!nextResult.data?.repository?.comparison) {
return {
...nextResult,
// When this code path is executed we probably have an error.
// We still want to show the data that was loaded before the error occurred.
data: previousResult.data,
}
}
const previousNodes = previousResult.data?.repository?.comparison?.fileDiffs?.nodes ?? []
const nextNodes = nextResult.data.repository?.comparison?.fileDiffs?.nodes ?? []
map: result => {
const diffs = result.data?.repository?.comparison.fileDiffs
return {
...nextResult,
data: {
repository: {
...nextResult.data.repository,
comparison: {
...nextResult.data.repository.comparison,
fileDiffs: {
...nextResult.data.repository.comparison.fileDiffs,
nodes: [...previousNodes, ...nextNodes],
},
},
},
},
nextVariables: diffs?.pageInfo.hasNextPage ? { after: diffs?.pageInfo.endCursor } : undefined,
data: diffs?.nodes,
error: result.error,
}
},
merge: (previous, next) => (previous ?? []).concat(next ?? []),
createRestoreStrategy: api =>
new IncrementalRestoreStrategy(
api,
n => n.length,
n => ({ first: n.length })
),
})
: null

View File

@ -10,37 +10,29 @@
import { Alert, Button, Input } from '$lib/wildcard'
import type { PageData, Snapshot } from './$types'
import type { GitTagsConnection } from './page.gql'
export let data: PageData
export const snapshot: Snapshot<{ count: number; scroller: ScrollerCapture }> = {
export const snapshot: Snapshot<{ tags: ReturnType<typeof data.tagsQuery.capture>; scroller: ScrollerCapture }> = {
capture() {
return {
count: tagsConnection?.nodes.length ?? 0,
tags: data.tagsQuery.capture(),
scroller: scroller.capture(),
}
},
async restore(snapshot) {
if (snapshot?.count && get(navigating)?.type === 'popstate') {
await tagsQuery?.restore(result => {
const count = result.data?.repository?.gitRefs?.nodes?.length
return !!count && count < snapshot.count
})
if (snapshot?.tags && get(navigating)?.type === 'popstate') {
await data.tagsQuery?.restore(snapshot.tags)
}
scroller.restore(snapshot.scroller)
},
}
let scroller: Scroller
let tagsConnection: GitTagsConnection | undefined
$: query = data.query
$: tagsQuery = data.tagsQuery
$: tagsConnection = $tagsQuery.data?.repository?.gitRefs ?? tagsConnection
$: if (tagsQuery) {
tagsConnection = undefined
}
$: tags = $tagsQuery.data
</script>
<svelte:head>
@ -53,10 +45,10 @@
<Button variant="primary" type="submit">Search</Button>
</form>
<Scroller bind:this={scroller} margin={600} on:more={tagsQuery.fetchMore}>
{#if !$tagsQuery.restoring && tagsConnection}
{#if tags}
<table>
<tbody>
{#each tagsConnection.nodes as tag (tag)}
{#each tags.nodes as tag (tag)}
<GitReference ref={tag} />
{:else}
<tr>
@ -69,7 +61,7 @@
</table>
{/if}
<div>
{#if $tagsQuery.fetching || $tagsQuery.restoring}
{#if $tagsQuery.fetching}
<LoadingSpinner />
{:else if $tagsQuery.error}
<Alert variant="danger">
@ -78,12 +70,12 @@
{/if}
</div>
</Scroller>
{#if tagsConnection && tagsConnection.nodes.length > 0}
{#if tags && tags.nodes.length > 0}
<div class="footer">
{tagsConnection.totalCount}
{pluralize('tag', tagsConnection.totalCount)} total
{#if tagsConnection.totalCount > tagsConnection.nodes.length}
(showing {tagsConnection.nodes.length})
{tags.totalCount}
{pluralize('tag', tags.totalCount)} total
{#if tags.totalCount > tags.nodes.length}
(showing {tags.nodes.length})
{/if}
</div>
{/if}

View File

@ -1,4 +1,4 @@
import { getGraphQLClient, infinityQuery } from '$lib/graphql'
import { OverwriteRestoreStrategy, getGraphQLClient, infinityQuery } from '$lib/graphql'
import { parseRepoRevision } from '$lib/shared'
import type { PageLoad } from './$types'
@ -22,17 +22,22 @@ export const load: PageLoad = ({ params, url }) => {
withBehindAhead: false,
query,
},
nextVariables: previousResult => {
if (previousResult?.data?.repository?.gitRefs?.pageInfo?.hasNextPage) {
return {
first: previousResult.data.repository.gitRefs.nodes.length + PAGE_SIZE,
}
map: result => {
const gitRefs = result.data?.repository?.gitRefs
return {
nextVariables: gitRefs?.pageInfo.hasNextPage
? { first: gitRefs.nodes.length + PAGE_SIZE }
: undefined,
data: gitRefs
? {
nodes: gitRefs.nodes,
totalCount: gitRefs.totalCount,
}
: undefined,
error: result.error,
}
return undefined
},
combine: (_previousResult, nextResult) => {
return nextResult
},
createRestoreStrategy: api => new OverwriteRestoreStrategy(api, data => ({ first: data.nodes.length })),
}),
}
}