mirror of https://github.com/sourcegraph/sourcegraph.git
svelte: Change GraphQL library and add repo pages error handling (#60567)
Most notable changes:

- Switch to the urql library. In its current form it is a simpler implementation because it uses _document caching_ instead of _normalized caching_. That means that individual requests/responses are cached; there is no "shared cache" that queries can draw from. However, so far most of our queries are quite "disjoint", so at least for now the reduced complexity (no need to define interface->type mappings, key fields, cache normalization) seems to be worth the tradeoff.
- Consolidating "infinite scroll" responses is now handled by an explicit helper function/store instead of happening in the background in the cache. The new API is quite verbose, but I didn't want to spend too much time optimizing for this use case yet. It's already more complex than I'd prefer, but I wanted it to properly handle restoring the right scroll position when navigating back to a page, without triggering new network requests.
- Added a simple version of the `Alert` component to show error and info messages.
- Added error handling to repository pages. When data loading fails for any reason, or when a resource is not available, we surface that information accordingly. Note that it doesn't look great UI-wise yet! I'm just adding `<Alert>`s where needed. My main goal was to not silently ignore errors anymore.
- Fixed various issues that I stumbled upon while working on this (for example an issue with GraphQL mocking).
This commit is contained in:
parent da85c8a832
commit 5d2fd2538a
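For context, a minimal sketch of what _document caching_ means in practice (the endpoint and query below are illustrative, not part of this change): repeating the same query + variables pair is served from the cache, but a different query selecting the same data triggers its own request, because there is no normalized entity cache.

    import { Client, cacheExchange, fetchExchange, gql } from '@urql/core'

    // Illustrative client setup; the real client lives in $lib/graphql/urql.ts.
    const client = new Client({
        url: '/.api/graphql',
        exchanges: [cacheExchange, fetchExchange],
    })

    // Hypothetical example query.
    const RepoQuery = gql`
        query Repo($name: String!) {
            repository(name: $name) {
                id
                description
            }
        }
    `

    // Same document + same variables: the second call is answered from the
    // document cache (one network request in total).
    await client.query(RepoQuery, { name: 'example/repo' }).toPromise()
    await client.query(RepoQuery, { name: 'example/repo' }).toPromise()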
@@ -60,6 +60,7 @@ BUILD_DEPS = [
    ":node_modules/@sveltejs/kit",
    ":node_modules/@sveltejs/vite-plugin-svelte",
    ":node_modules/@types/prismjs",
    ":node_modules/@urql/core",
    ":node_modules/graphql",
    ":node_modules/prismjs",
    ":node_modules/sass",
@@ -67,6 +68,7 @@ BUILD_DEPS = [
    ":node_modules/ts-key-enum",
    ":node_modules/vite",
    ":node_modules/vite-plugin-inspect",
    ":node_modules/wonka",
    "//:node_modules/@apollo/client",
    "//:node_modules/@codemirror/autocomplete",
    "//:node_modules/@codemirror/commands",

@@ -74,9 +74,11 @@
    "@sourcegraph/shared": "workspace:*",
    "@sourcegraph/web": "workspace:*",
    "@sourcegraph/wildcard": "workspace:*",
    "@urql/core": "^4.2.3",
    "highlight.js": "^10.0.0",
    "prismjs": "^1.29.0",
    "ts-key-enum": "^2.0.12"
    "ts-key-enum": "^2.0.12",
    "wonka": "^6.3.4"
},
"msw": {
    "workerDirectory": "static"
@@ -13,6 +13,6 @@
        %sveltekit.head%
    </head>
    <body data-sveltekit-preload-data data-sveltekit-preload-code="hover">
        %sveltekit.body%
        <div style="display: contents">%sveltekit.body%</div>
    </body>
</html>
@@ -13,13 +13,15 @@
    interface $$Props extends SVGAttributes<SVGElement> {
        svgPath: string
        inline?: boolean
        size?: number
    }

    export let svgPath: string
    export let inline: boolean = false
    export let size: number = 24
</script>

<svg class:icon-inline={inline} height="24" width="24" viewBox="0 0 24 24" {...$$restProps}>
<svg class:icon-inline={inline} height={size} width={size} viewBox="0 0 24 24" {...$$restProps}>
    <path d={svgPath} />
</svg>
@@ -1,7 +1,7 @@
export enum Param {
    before = '$before',
    after = '$after',
    last = '$last',
    before = '_before',
    after = '_after',
    last = '_last',
}

export function getPaginationParams(
@@ -10,13 +10,13 @@ export function getPaginationParams(
):
    | { first: number; last: null; before: null; after: string | null }
    | { first: null; last: number; before: string | null; after: null } {
    if (searchParams.has('$before')) {
    if (searchParams.has(Param.before)) {
        return { first: null, last: pageSize, before: searchParams.get(Param.before), after: null }
    }
    if (searchParams.has('$after')) {
    if (searchParams.has(Param.after)) {
        return { first: pageSize, last: null, before: null, after: searchParams.get(Param.after) }
    }
    if (searchParams.has('$last')) {
    if (searchParams.has(Param.last)) {
        return { first: null, last: pageSize, before: null, after: null }
    }
    return { first: pageSize, last: null, before: null, after: null }
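For illustration, assuming the elided parameters are `(searchParams, pageSize)`, the URL-to-variables mapping with the renamed `_`-prefixed params looks like this (cursor values are hypothetical):

    // '?_after=c123' requests the page after cursor c123:
    getPaginationParams(new URLSearchParams('_after=c123'), 20)
    // => { first: 20, last: null, before: null, after: 'c123' }

    // '?_last' requests the last page:
    getPaginationParams(new URLSearchParams('_last=1'), 20)
    // => { first: null, last: 20, before: null, after: null }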
@@ -76,6 +76,8 @@
            </svelte:self>
        {/each}
    </ul>
{:catch error}
    <slot name="error" {error} />
{/await}
{/if}
</li>
@@ -86,7 +88,7 @@
    margin: 0.25rem 0;
    border-radius: var(--border-radius);

    &[aria-expanded='true'][tabindex='0']:focus {
    &[tabindex='0']:focus {
        box-shadow: none;

        > .label {
@@ -243,6 +243,9 @@
    <svelte:fragment let:entry let:toggle let:expanded>
        <slot {entry} {toggle} {expanded} />
    </svelte:fragment>
    <svelte:fragment slot="error" let:error>
        <slot name="error" {error} />
    </svelte:fragment>
</TreeNode>
{/each}
</ul>
@@ -1,24 +1,18 @@
import type { KeyArgsFunction, KeySpecifier } from '@apollo/client/cache/inmemory/policies'
import {
    gql,
    ApolloClient,
    InMemoryCache,
    createHttpLink,
    from,
    type HttpOptions,
    type NormalizedCacheObject,
    type OperationVariables,
    type QueryOptions,
    type DocumentNode,
    type FetchPolicy,
    type FieldPolicy,
} from '@apollo/client/core/index'
import { trimEnd, once } from 'lodash'

import { dev } from '$app/environment'
import { createAggregateError } from '$lib/common'
import { GRAPHQL_URI, checkOk } from '$lib/http-client'

import { getHeaders } from './shared'

interface BuildGraphQLUrlOptions {
    request?: string
    baseUrl?: string
@@ -32,80 +26,18 @@ function buildGraphQLUrl({ request, baseUrl }: BuildGraphQLUrlOptions): string {
    return baseUrl ? new URL(trimEnd(baseUrl, '/') + apiURL).href : apiURL
}

function getHeaders(): { [header: string]: string } {
    const headers: { [header: string]: string } = {
        ...window?.context?.xhrHeaders,
        Accept: 'application/json',
        'Content-Type': 'application/json',
    }
    const parameters = new URLSearchParams(window.location.search)
    const trace = parameters.get('trace')
    if (trace) {
        headers['X-Sourcegraph-Should-Trace'] = trace
    }
    const feat = parameters.getAll('feat')
    if (feat.length) {
        headers['X-Sourcegraph-Override-Feature'] = feat.join(',')
    }
    return headers
}
const customFetch: HttpOptions['fetch'] = (uri, options) => fetch(uri, options).then(checkOk)

export type GraphQLClient = ApolloClient<NormalizedCacheObject>

/**
 * Creates a field policy for a list-like forward connection. It concatenates the
 * incoming nodes with the existing nodes, and updates the pageInfo.
 * @deprecated Use `getGraphQLClient` from @lib/graphql instead.
 *
 * This is only used for compatibility with APIs that expect an ApolloClient.
 */
function listLikeForwardConnection({ keyArgs }: { keyArgs: KeySpecifier | KeyArgsFunction | false }): FieldPolicy {
    return {
        keyArgs,

        merge(existing, incoming) {
            if (!existing) {
                return incoming
            }

            if (existing.pageInfo.endCursor === incoming.pageInfo.endCursor) {
                // If the endCursor is the same, we assume that the incoming
                // nodes are the same as the existing nodes. This can happen
                // when the same query is executed multiple times in a row.
                // In this case, we return the existing nodes to prevent
                // incorrect cache updates.
                return existing
            }

            return {
                ...incoming,
                nodes: [...existing.nodes, ...incoming.nodes],
            }
        },
    }
}

export const getGraphQLClient = once(async (): Promise<GraphQLClient> => {
export const getGraphQLClient = once((): GraphQLClient => {
    const cache = new InMemoryCache({
        typePolicies: {
            GitCommit: {
                fields: {
                    ancestors: listLikeForwardConnection({
                        keyArgs: args => {
                            // This key function treats an empty path the same as an
                            // omitted path.
                            // keyArgs: ['query', 'path', 'follow', 'after'],
                            const keyArgs: Record<string, any> = {}
                            if (args) {
                                for (const key of ['query', 'path', 'follow', 'after']) {
                                    if (key in args && (key !== 'path' || args[key] !== '')) {
                                        keyArgs[key] = args[key]
                                    }
                                }
                            }
                            return JSON.stringify(keyArgs)
                        },
                    }),
                },
            },
            GitTree: {
                // GitTree objects don't have an ID, but canonicalURL is unique
                keyFields: ['canonicalURL'],
@@ -132,13 +64,6 @@ export const getGraphQLClient = once(async (): Promise<GraphQLClient> => {
            Person: {
                merge: true,
            },
            RepositoryComparison: {
                fields: {
                    fileDiffs: listLikeForwardConnection({
                        keyArgs: ['paths'],
                    }),
                },
            },
        },
        possibleTypes: {
            TreeEntry: ['GitTree', 'GitBlob'],
@@ -161,19 +86,3 @@ export const getGraphQLClient = once(async (): Promise<GraphQLClient> => {
        ]),
    })
})

export async function query<T, V extends OperationVariables = OperationVariables>(
    query: DocumentNode,
    variables?: V,
    options?: Omit<QueryOptions<T, V>, 'query' | 'variables'>
): Promise<T> {
    return (await getGraphQLClient()).query<T, V>({ query, variables, ...options }).then(result => {
        if (result.errors && result.errors.length > 0) {
            throw createAggregateError(result.errors)
        }
        return result.data
    })
}

export type { FetchPolicy }
export { gql }
@@ -1,6 +1 @@
export * from './apollo'

// Helper type for extracting node() query related type information
export type NodeFromResult<T extends { __typename: string } | null, N extends string> = T extends { __typename: N }
    ? NonNullable<T>
    : never
export * from './urql'
client/web-sveltekit/src/lib/graphql/shared.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
export function getHeaders(): { [header: string]: string } {
    const headers: { [header: string]: string } = {
        ...window?.context?.xhrHeaders,
        Accept: 'application/json',
        'Content-Type': 'application/json',
    }
    const parameters = new URLSearchParams(window.location.search)
    const trace = parameters.get('trace')
    if (trace) {
        headers['X-Sourcegraph-Should-Trace'] = trace
    }
    const feat = parameters.getAll('feat')
    if (feat.length) {
        headers['X-Sourcegraph-Override-Feature'] = feat.join(',')
    }
    return headers
}
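For illustration, the headers produced for a hypothetical URL `?trace=1&feat=foo&feat=bar` (with no extra `window.context.xhrHeaders`) would be:

    getHeaders()
    // => {
    //      Accept: 'application/json',
    //      'Content-Type': 'application/json',
    //      'X-Sourcegraph-Should-Trace': '1',
    //      'X-Sourcegraph-Override-Feature': 'foo,bar',
    //    }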
client/web-sveltekit/src/lib/graphql/urql.test.ts (new file, 215 lines)
@@ -0,0 +1,215 @@
import { type AnyVariables, Client, type OperationResult, CombinedError, cacheExchange } from '@urql/core'
import { test, expect, vi, beforeEach } from 'vitest'
import { pipe, filter, map, merge } from 'wonka'

import { infinityQuery } from './urql'

function getMockClient(responses: Partial<OperationResult<any, AnyVariables>>[]): Client {
    return new Client({
        url: '#testingonly',
        exchanges: [
            cacheExchange, // This is required because infinityQuery expects that a cache exchange is present
            ({ forward }) =>
                operations$ => {
                    const mockResults$ = pipe(
                        operations$,
                        filter(operation => {
                            switch (operation.kind) {
                                case 'query':
                                case 'mutation':
                                    return true
                                default:
                                    return false
                            }
                        }),
                        map((operation): OperationResult<any, AnyVariables> => {
                            const response = responses.shift()
                            if (!response) {
                                return {
                                    operation,
                                    error: new CombinedError({
                                        networkError: new Error('No more responses'),
                                    }),
                                    stale: false,
                                    hasNext: false,
                                }
                            }
                            return {
                                ...response,
                                operation,
                                data: response.data ?? undefined,
                                error: response.error ?? undefined,
                                stale: false,
                                hasNext: false,
                            }
                        })
                    )

                    const forward$ = pipe(
                        operations$,
                        filter(operation => {
                            switch (operation.kind) {
                                case 'query':
                                case 'mutation':
                                    return false
                                default:
                                    return true
                            }
                        }),
                        forward
                    )

                    return merge([mockResults$, forward$])
                },
        ],
    })
}

function getQuery(client: Client) {
    return infinityQuery({
        client,
        query: 'query { list { nodes { id } pageInfo { hasNextPage, endCursor } } }',
        variables: {
            first: 2,
            afterCursor: null as string | null,
        },
        nextVariables: previousResult => {
            if (previousResult?.data?.list?.pageInfo?.hasNextPage) {
                return {
                    afterCursor: previousResult.data.list.pageInfo.endCursor,
                }
            }
            return undefined
        },
        combine: (previousResult, nextResult) => {
            if (!nextResult.data?.list) {
                return nextResult
            }
            const previousNodes = previousResult.data?.list?.nodes ?? []
            const nextNodes = nextResult.data.list?.nodes ?? []
            return {
                ...nextResult,
                data: {
                    list: {
                        ...nextResult.data.list,
                        nodes: [...previousNodes, ...nextNodes],
                    },
                },
            }
        },
    })
}

let query: ReturnType<typeof infinityQuery>

beforeEach(() => {
    vi.useFakeTimers()

    const client = getMockClient([
        {
            data: {
                list: {
                    nodes: [{ id: 1 }, { id: 2 }],
                    pageInfo: {
                        hasNextPage: true,
                        endCursor: '2',
                    },
                },
            },
        },
        {
            data: {
                list: {
                    nodes: [{ id: 3 }, { id: 4 }],
                    pageInfo: {
                        hasNextPage: true,
                        endCursor: '4',
                    },
                },
            },
        },
        {
            data: {
                list: {
                    nodes: [{ id: 5 }, { id: 6 }],
                    pageInfo: {
                        hasNextPage: false,
                    },
                },
            },
        },
    ])
    query = getQuery(client)
})

test('fetch more', async () => {
    const subscribe = vi.fn()
    query.subscribe(subscribe)

    await vi.runAllTimersAsync()

    // 1. call: fetching -> true
    // 2. call: result
    expect(subscribe).toHaveBeenCalledTimes(2)
    expect(subscribe.mock.calls[0][0]).toMatchObject({
        fetching: true,
    })
    expect(subscribe.mock.calls[1][0]).toMatchObject({
        fetching: false,
        data: {
            list: {
                nodes: [{ id: 1 }, { id: 2 }],
                pageInfo: {
                    hasNextPage: true,
                    endCursor: '2',
                },
            },
        },
    })

    // Fetch more data
    query.fetchMore()
    await vi.runAllTimersAsync()

    // 3. call: fetching -> true
    // 4. call: result
    expect(subscribe).toHaveBeenCalledTimes(4)
    expect(subscribe.mock.calls[2][0]).toMatchObject({
        fetching: true,
    })
    expect(subscribe.mock.calls[3][0]).toMatchObject({
        fetching: false,
        data: {
            list: {
                nodes: [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }],
                pageInfo: {
                    hasNextPage: true,
                    endCursor: '4',
                },
            },
        },
    })
})

test('restoring state', async () => {
    const subscribe = vi.fn()
    query.subscribe(subscribe)
    await vi.runAllTimersAsync()
    await query.restore(result => (result.data as any).list.nodes.length < 5)

    expect(subscribe).toHaveBeenCalledTimes(6)
    expect(subscribe.mock.calls[4][0]).toMatchObject({
        restoring: true,
    })
    expect(subscribe.mock.calls[5][0]).toMatchObject({
        restoring: false,
        data: {
            list: {
                nodes: [{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }, { id: 6 }],
                pageInfo: {
                    hasNextPage: false,
                },
            },
        },
    })
})
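Beyond the tests above, a hedged sketch of how a page might consume such a store (the query shape and the `savedCount` bookkeeping are illustrative, not part of this diff):

    // Created in a data loader so the first page is prefetched before render:
    const connection = infinityQuery({ client, query, variables, nextVariables, combine })

    // In the component: subscribe like any Svelte store.
    const unsubscribe = connection.subscribe(result => {
        // result.fetching / result.restoring / result.error / result.data
    })

    // When the user scrolls to the bottom:
    connection.fetchMore()

    // When navigating back, refetch pages until the previous item count is reached:
    await connection.restore(result => (result.data?.list.nodes.length ?? 0) < savedCount)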
client/web-sveltekit/src/lib/graphql/urql.ts (new file, 314 lines)
@@ -0,0 +1,314 @@
import {
    Client,
    cacheExchange,
    fetchExchange,
    mapExchange,
    type Exchange,
    makeOperation,
    type AnyVariables,
    type OperationResult,
    createRequest,
    type DocumentInput,
} from '@urql/core'
import type { OperationDefinitionNode } from 'graphql'
import { once } from 'lodash'
import { from, isObservable, Subject, type Observable, concat, of } from 'rxjs'
import { map, switchMap, scan, startWith } from 'rxjs/operators'
import { type Readable, readable, get } from 'svelte/store'

import type { GraphQLResult } from '@sourcegraph/http-client'

import { GRAPHQL_URI } from '$lib/http-client'

import { getHeaders } from './shared'

export type GraphQLClient = Client

export { gql, createRequest, type DocumentInput, type OperationResult } from '@urql/core'

/**
 * This exchange appends the operation name to the URL for each operation.
 */
const appendOperationName: Exchange = mapExchange({
    onOperation: op => {
        const operationName = op.query.definitions.find(
            (def): def is OperationDefinitionNode => def.kind === 'OperationDefinition'
        )?.name?.value
        if (operationName) {
            return makeOperation(op.kind, op, {
                ...op.context,
                url: `${op.context.url}?${operationName}`,
            })
        }
        return op
    },
})

export const getGraphQLClient = once((): Client => {
    return new Client({
        url: GRAPHQL_URI,
        fetchOptions: () => ({
            headers: getHeaders(),
        }),
        exchanges: [cacheExchange, appendOperationName, fetchExchange],
    })
})
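To make the effect of `appendOperationName` concrete (assuming `GRAPHQL_URI` is `/.api/graphql`; the operation name is an example):

    // query TreeEntries($repoName: String!, ...) { ... }
    // is sent as:  POST /.api/graphql?TreeEntries
    // An anonymous query falls through unchanged:  POST /.api/graphql
    // This makes individual operations easy to tell apart in the network tab.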
// TODO: Refactor to eliminate the need for this function
/**
 * @deprecated Initiate GraphQL requests in data loader functions instead
 */
export function query<TData = any, TVariables extends AnyVariables = AnyVariables>(
    query: DocumentInput<TData, TVariables>,
    variables: TVariables
): Promise<OperationResult<TData, TVariables>> {
    return getGraphQLClient().query<TData, TVariables>(query, variables).toPromise()
}

interface InfinityQueryArgs<TData = any, TVariables extends AnyVariables = AnyVariables> {
    /**
     * The {@link Client} instance to use for the query.
     */
    client: Client

    /**
     * The GraphQL query to execute.
     */
    query: DocumentInput<TData, TVariables>

    /**
     * The initial variables to use for the query.
     */
    variables: TVariables | Observable<TVariables>

    /**
     * A function that returns the next set of variables to use for the query.
     *
     * @param previousResult - The previous result of the query.
     *
     * @remarks
     * `nextVariables` is called when {@link InfinityQueryStore.fetchMore} is called to get the next set
     * of variables to fetch the next page of data. This function is used to extract the cursor for the next
     * page from the previous result.
     */
    nextVariables: (previousResult: OperationResult<TData, TVariables>) => Partial<TVariables> | undefined

    /**
     * A function to combine the previous result with the next result.
     *
     * @param previousResult - The previous result of the query.
     * @param nextResult - The next result of the query.
     * @returns The combined result of the query.
     *
     * @remarks
     * `combine` is called when the next result is received to merge the previous result with the new
     * result. This function is used to append the new data to the previous data.
     */
    combine: (
        previousResult: OperationResultState<TData, TVariables>,
        nextResult: OperationResultState<TData, TVariables>
    ) => OperationResultState<TData, TVariables>
}

interface OperationResultState<TData = any, TVariables extends AnyVariables = AnyVariables>
    extends OperationResult<TData, TVariables> {
    /**
     * Whether a GraphQL request is currently in flight.
     */
    fetching: boolean
    /**
     * Whether the store is currently restoring data.
     */
    restoring: boolean
}

interface InfinityQueryStore<TData = any, TVariables extends AnyVariables = AnyVariables>
    extends Readable<OperationResultState<TData, TVariables>> {
    /**
     * Reruns the query with the next set of variables returned by {@link InfinityQueryArgs.nextVariables}.
     *
     * @remarks
     * A new query will only be executed if there is no query currently in flight and {@link InfinityQueryArgs.nextVariables}
     * returns a value different from `undefined`.
     */
    fetchMore: () => void

    /**
     * Fetches more data until the given restoreHandler returns `false`.
     *
     * @param restoreHandler - A function that returns `true` if more data should be fetched.
     *
     * @remarks
     * When navigating back to a page that was previously fetched with `infinityQuery`, the page
     * should call `restore` until the previous data state is restored.
     */
    restore: (restoreHandler: (result: OperationResultState<TData, TVariables>) => boolean) => Promise<void>
}

/**
 * Function to create a store to manage "infinite scroll" style queries.
 *
 * @param args - an {@link InfinityQueryArgs} object to pass a `query`, `variables` and other options to manage infinite scroll.
 * @returns an {@link InfinityQueryStore} of query results.
 *
 * @remarks
 * `infinityQuery` uses {@link InfinityQueryArgs.client} to execute {@link InfinityQueryArgs.query}
 * with the given {@link InfinityQueryArgs.variables}.
 *
 * The caller can call {@link InfinityQueryStore.fetchMore} to fetch more data. The store will
 * call {@link InfinityQueryArgs.nextVariables} to get the next set of variables to use for the query
 * and merge it into the initial variables.
 * When the result is received, the store will call {@link InfinityQueryArgs.combine} to merge the
 * previous result with the new result.
 *
 * Calling this function will prefetch the initial data, i.e. the data is fetched before the store is
 * subscribed to.
 */
export function infinityQuery<TData = any, TVariables extends AnyVariables = AnyVariables>(
    args: InfinityQueryArgs<TData, TVariables>
): InfinityQueryStore<TData, TVariables> {
    // This is a hacky workaround to create an initialState. The empty object is
    // invalid but the request will never be executed with these variables anyway.
    const initialVariables = isObservable(args.variables) ? args.variables : of(args.variables)
    const operation = args.client.createRequestOperation(
        'query',
        isObservable(args.variables)
            ? createRequest(args.query, {} as TVariables)
            : createRequest(args.query, args.variables)
    )
    const initialState: OperationResultState<TData, TVariables> = {
        operation,
        error: undefined,
        data: undefined,
        extensions: undefined,
        stale: false,
        fetching: false,
        restoring: false,
        hasNext: false,
    }
    const nextVariables = new Subject<Partial<TVariables>>()
    let shouldRestore: ((result: OperationResultState<TData, TVariables>) => boolean) | null = null

    // Prefetch data. We don't want to wait until the store is subscribed to. That allows us to use this function
    // inside a data loader and the data will be prefetched before the component is rendered.
    initialVariables.subscribe(variables => {
        void args.client.query(args.query, variables).toPromise()
    })

    const result = readable(initialState, set => {
        const subscription = initialVariables
            .pipe(
                switchMap(initialVariables =>
                    nextVariables.pipe(
                        startWith(initialVariables), // nextVariables will not emit until the first fetchMore is called
                        switchMap(variables => {
                            const operation = args.client.createRequestOperation(
                                'query',
                                createRequest(args.query, { ...initialVariables, ...variables })
                            )
                            return concat<Partial<OperationResultState<TData, TVariables>>>(
                                of({ fetching: true, stale: false, restoring: false }),
                                from(args.client.executeRequestOperation(operation).toPromise()).pipe(
                                    map(({ data, stale, operation, error, extensions }) => ({
                                        fetching: false,
                                        data,
                                        stale: !!stale,
                                        operation,
                                        error,
                                        extensions,
                                    }))
                                )
                            )
                        })
                    )
                ),
                scan((result, update) => {
                    const newResult = { ...result, ...update }
                    return update.fetching ? newResult : args.combine(result, newResult)
                }, initialState)
            )
            .subscribe(result => {
                if (shouldRestore) {
                    result.restoring = Boolean(
                        (result.data || result.error) && shouldRestore(result) && args.nextVariables(result)
                    )
                }
                set(result)
            })

        return () => subscription.unsubscribe()
    })

    return {
        ...result,
        fetchMore: () => {
            const current = get(result)
            if (current.fetching || current.restoring) {
                return
            }
            const newVariables = args.nextVariables(current)
            if (!newVariables) {
                return
            }
            nextVariables.next(newVariables)
        },
        restore: restoreHandler => {
            shouldRestore = result => {
                return Boolean((result.data || result.error) && restoreHandler(result) && args.nextVariables(result))
            }
            return new Promise(resolve => {
                const unsubscribe = result.subscribe(result => {
                    if (result.fetching) {
                        return
                    }
                    if (result.data || result.error) {
                        const newVariables = args.nextVariables(result)
                        if (restoreHandler(result) && newVariables) {
                            shouldRestore = restoreHandler
                            nextVariables.next(newVariables)
                        } else {
                            unsubscribe()
                            shouldRestore = null
                            resolve()
                        }
                    }
                })
            })
        },
    }
}

/**
 * Converts an OperationResult (urql) to a GraphQLResult (sourcegraph/http-client).
 */
export function toGraphQLResult<TData = any, TVariables extends AnyVariables = AnyVariables>(
    result: OperationResult<TData, TVariables>
): GraphQLResult<TData> {
    return result.error || !result.data
        ? {
              ...result,
              data: result.data ?? null,
              errors: result.error?.graphQLErrors ?? [],
          }
        : {
              ...result,
              data: result.data,
              errors: undefined,
          }
}

/**
 * Given an {@link OperationResult}, this function returns the mapped data or throws the error.
 * To be used together with a promise that resolves to a GraphQL response. This ensures
 * that the promise rejects when the GraphQL response contains an error.
 *
 * @param mapper - A function to map the data from the result.
 */
export function mapOrThrow<T extends OperationResult, U>(mapper: (result: T) => U): (result: T) => U {
    return (result: T) => {
        if (result.error) {
            throw result.error
        }
        return mapper(result)
    }
}
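A short sketch of the intended `query` + `mapOrThrow` pattern (the query document and field access are hypothetical examples):

    function fetchRepoDescription(name: string): Promise<string> {
        return query(RepoQuery, { name }).then(
            // Rejects the promise on GraphQL/network errors, otherwise maps the data.
            mapOrThrow(result => result.data?.repository?.description ?? '')
        )
    }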
client/web-sveltekit/src/lib/repo/api/tree.gql (new file, 30 lines)
@@ -0,0 +1,30 @@
query TreeEntries($repoName: String!, $revision: String!, $filePath: String!, $first: Int) {
    repository(name: $repoName) {
        id
        ... on Repository {
            commit(rev: $revision) {
                ...GitCommitFieldsWithTree
            }
        }
    }
}

fragment GitCommitFieldsWithTree on GitCommit {
    id
    tree(path: $filePath) {
        canonicalURL
        isRoot
        name
        path
        isDirectory
        entries(first: $first) {
            canonicalURL
            name
            path
            isDirectory
            ... on GitBlob {
                languages
            }
        }
    }
}
@@ -1,57 +1,32 @@
import { dirname } from 'path'

import { query, gql } from '$lib/graphql'
import type { TreeEntriesResult, GitCommitFieldsWithTree, TreeEntriesVariables, Scalars } from '$lib/graphql-operations'
import { mapOrThrow, query } from '$lib/graphql'
import type { Scalars } from '$lib/graphql-types'
import type { TreeProvider } from '$lib/TreeView'

import { type GitCommitFieldsWithTree, type TreeEntriesVariables, TreeEntries } from './tree.gql'

const MAX_FILE_TREE_ENTRIES = 1000

const treeEntriesQuery = gql`
    query TreeEntries($repoName: String!, $revision: String!, $filePath: String!, $first: Int) {
        repository(name: $repoName) {
            id
            ... on Repository {
                commit(rev: $revision) {
                    ...GitCommitFieldsWithTree
                }
            }
        }
    }

    fragment GitCommitFieldsWithTree on GitCommit {
        id
        tree(path: $filePath) {
            canonicalURL
            isRoot
            name
            path
            isDirectory
            entries(first: $first) {
                canonicalURL
                name
                path
                isDirectory
                ... on GitBlob {
                    languages
                }
            }
        }
    }
`

export async function fetchTreeEntries(args: TreeEntriesVariables): Promise<GitCommitFieldsWithTree> {
    const data = await query<TreeEntriesResult, TreeEntriesVariables>(
        treeEntriesQuery,
export function fetchTreeEntries(args: TreeEntriesVariables): Promise<GitCommitFieldsWithTree> {
    return query(
        TreeEntries,
        {
            ...args,
            first: args.first ?? MAX_FILE_TREE_ENTRIES,
        }
        // mightContainPrivateInfo: true,
    ).then(
        mapOrThrow(result => {
            if (!result.data?.repository) {
                throw new Error('Unable to fetch repository information')
            }
            if (!result.data.repository.commit) {
                throw new Error('Unable to fetch commit information')
            }
            return result.data.repository.commit
        })
    )
    if (!data.repository?.commit) {
        throw new Error('Unable to fetch repository information')
    }
    return data.repository.commit
}

export const NODE_LIMIT: unique symbol = Symbol()
@@ -60,6 +35,7 @@ type TreeRoot = NonNullable<GitCommitFieldsWithTree['tree']>
export type TreeEntryFields = NonNullable<GitCommitFieldsWithTree['tree']>['entries'][number]
type ExpandableFileTreeNodeValues = TreeEntryFields
export type FileTreeNodeValue = ExpandableFileTreeNodeValues | typeof NODE_LIMIT
export type FileTreeData = { root: TreeRoot; values: FileTreeNodeValue[] }

export async function fetchSidebarFileTree({
    repoName,
@@ -69,7 +45,7 @@ export async function fetchSidebarFileTree({
    repoName: Scalars['ID']['input']
    revision: string
    filePath: string
}): Promise<{ root: TreeRoot; values: FileTreeNodeValue[] }> {
}): Promise<FileTreeData> {
    const result = await fetchTreeEntries({
        repoName,
        revision,
@@ -87,18 +63,11 @@
    return { root, values }
}

export type FileTreeLoader = (args: {
    repoName: string
    revision: string
    filePath: string
    parent?: FileTreeProvider
}) => Promise<FileTreeProvider>
export type FileTreeLoader = (args: { filePath: string; parent?: FileTreeProvider }) => Promise<FileTreeData>

interface FileTreeProviderArgs {
    root: NonNullable<GitCommitFieldsWithTree['tree']>
    values: FileTreeNodeValue[]
    repoName: string
    revision: string
    loader: FileTreeLoader
    parent?: TreeProvider<FileTreeNodeValue>
}
@@ -110,10 +79,6 @@ export class FileTreeProvider implements TreeProvider<FileTreeNodeValue> {
        return this.args.root
    }

    public getRepoName(): string {
        return this.args.repoName
    }

    public getEntries(): FileTreeNodeValue[] {
        if (this.args.parent || this.args.root.isRoot) {
            return this.args.values
@@ -129,21 +94,19 @@
            throw new Error('Cannot fetch children for non-expandable tree entry')
        }

        return this.args.loader({
            repoName: this.args.repoName,
            revision: this.args.revision,
        const args = await this.args.loader({
            filePath: entry.path,
            parent: this,
        })
        return new FileTreeProvider({ ...args, loader: this.args.loader, parent: this })
    }

    public async fetchParent(): Promise<FileTreeProvider> {
        const parentPath = dirname(this.args.root.path)
        return this.args.loader({
            repoName: this.args.repoName,
            revision: this.args.revision,
            filePath: parentPath,
        const args = await this.args.loader({
            filePath: dirname(this.args.root.path),
            parent: this,
        })
        return new FileTreeProvider({ ...args, loader: this.args.loader })
    }

    public getNodeID(entry: FileTreeNodeValue): string {
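To illustrate the narrowed `FileTreeLoader` contract above, a hedged sketch of a loader a route might now supply — `repoName` and `revision` are captured from the route instead of being passed on every call (names are illustrative):

    // Hypothetical loader; repoName and revision come from the page's params.
    const loader: FileTreeLoader = ({ filePath }) =>
        fetchSidebarFileTree({ repoName, revision, filePath })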
@@ -1,7 +1,7 @@
import type { EditorView } from '@codemirror/view'
import { from, type Subscription } from 'rxjs'
import { switchMap, map, startWith } from 'rxjs/operators'
import { get, writable, type Readable, readonly } from 'svelte/store'
import { Subject, from, of } from 'rxjs'
import { switchMap, map, startWith, catchError } from 'rxjs/operators'
import { get, type Readable, readable } from 'svelte/store'

import { goto as svelteGoto } from '$app/navigation'
import { page } from '$app/stores'
@@ -141,43 +141,77 @@ export function openImplementations(

interface CombinedBlobData {
    blob: BlobPage_Blob | null
    highlights: string | undefined
    /**
     * JSON encoded highlighting information. Can be an empty string.
     */
    highlights: string
    blobPending: boolean
    highlightsPending: boolean
    blobError: Error | null
    highlightsError: Error | null
}

interface BlobDataHandler {
    set(blob: Promise<BlobPage_Blob | null>, highlight: Promise<string | undefined>): void
    combinedBlobData: Readable<CombinedBlobData>
    loading: Readable<boolean>
interface BlobDataHandler extends Readable<CombinedBlobData> {
    set(blob: PromiseLike<BlobPage_Blob | null>, highlight: PromiseLike<string | undefined>): void
}

/**
 * Helper store to synchronize blob data and highlighting data handling.
 * This store synchronizes the state of the blob data and the highlights. While new blob data is
 * loading, the old blob and highlights data is still available. Once the blob data is loaded, the
 * highlights are updated.
 */
export function createBlobDataHandler(): BlobDataHandler {
    const combinedBlobData = writable<CombinedBlobData>({ blob: null, highlights: undefined })
    const loading = writable<boolean>(false)

    let subscription: Subscription | undefined
    const input = new Subject<{ blob: PromiseLike<BlobPage_Blob | null>; highlight: PromiseLike<string | undefined> }>()

    return {
        set(blob: Promise<BlobPage_Blob | null>, highlight: Promise<string | undefined>): void {
            subscription?.unsubscribe()
            loading.set(true)
            subscription = from(blob)
                .pipe(
                    switchMap(blob =>
                        from(highlight).pipe(
                            startWith(undefined),
                            map(highlights => ({ blob, highlights }))
                        )
        ...readable<CombinedBlobData>(
            {
                blob: null,
                highlights: '',
                blobPending: false,
                highlightsPending: false,
                blobError: null,
                highlightsError: null,
            },
            (_set, update) => {
                const subscription = input
                    .pipe(
                        switchMap(({ blob, highlight }) => {
                            return from(blob).pipe(
                                switchMap(blob => {
                                    return from(highlight).pipe(
                                        map((highlights = '') => ({
                                            highlights,
                                            highlightsPending: false,
                                            highlightsError: null,
                                        })),
                                        startWith({
                                            blob,
                                            blobPending: false,
                                            blobError: null,
                                            highlights: '',
                                            highlightsPending: true,
                                            highlightsError: null,
                                        }),
                                        catchError(error =>
                                            of({ highlights: '', highlightsPending: false, highlightsError: error })
                                        )
                                    )
                                }),
                                startWith({ blobPending: true }),
                                catchError(error => of({ blob: null, blobPending: false, blobError: error }))
                            )
                        })
                    )
                )
                .subscribe(result => {
                    combinedBlobData.set(result)
                    loading.set(false)
                })
                    .subscribe(updatedCombinedData => {
                        update(combinedData => ({ ...combinedData, ...updatedCombinedData }))
                    })
                return () => subscription.unsubscribe()
            }
        ),

        set(blob, highlight) {
            input.next({ blob, highlight })
        },
        combinedBlobData: readonly(combinedBlobData),
        loading: readonly(loading),
    }
}
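A hedged sketch of how a blob page might drive this store — the promises would come from the route's data loader, and the names are illustrative:

    const blobData = createBlobDataHandler()

    // Whenever route data changes, feed the new promises in. The store keeps the
    // previous blob/highlights visible until the new blob resolves, then swaps.
    $: blobData.set(data.blob, data.highlights)

    // $blobData.blobError and $blobData.highlightsError can then be surfaced via <Alert>.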
@@ -47,7 +47,7 @@ export const fetchFileRangeMatches = async (args: {
    format?: HighlightResponseFormat
    ranges: HighlightLineRange[]
}): Promise<string[][]> => {
    const data = await query<HighlightedFileResult, HighlightedFileVariables>(HIGHLIGHTED_FILE_QUERY, {
    const result = await query<HighlightedFileResult, HighlightedFileVariables>(HIGHLIGHTED_FILE_QUERY, {
        repoName: args.result.repository,
        commitID: args.result.commit ?? '',
        filePath: args.result.path,
@@ -56,11 +56,11 @@
        disableTimeout: true,
    })

    if (!data?.repository?.commit?.blob?.highlight) {
    if (!result.data?.repository?.commit?.blob?.highlight) {
        throw new Error('Unable to highlight file range')
    }

    const file = data.repository.commit.blob
    const file = result.data.repository.commit.blob
    if (file?.isDirectory) {
        return []
    }
@@ -13,7 +13,7 @@
import { submitSearch, type QueryStateStore } from '../state'
import BaseCodeMirrorQueryInput from '$lib/search/BaseQueryInput.svelte'
import { createSuggestionsSource } from '$lib/web'
import { gql, query } from '$lib/graphql'
import { query, type DocumentInput } from '$lib/graphql'
import Suggestions from './Suggestions.svelte'
import { user } from '$lib/stores'

@@ -99,8 +99,10 @@
        }),
    ]

    function graphqlQuery<T, V extends Record<string, any>>(request: string, variables: V) {
        return query<T, V>(gql(request), variables)
    async function graphqlQuery<T, V extends Record<string, any>>(request: DocumentInput, variables: V) {
        const result = await query<T, V>(request, variables)
        // This is a hack to make urql work with the API that createSuggestionsSource expects
        return result.data ?? ({} as any)
    }
</script>
@@ -5,8 +5,6 @@ import type { Settings, TemporarySettingsStorage } from '$lib/shared'

import type { AuthenticatedUser, FeatureFlag } from '../routes/layout.gql'

import type { GraphQLClient } from './graphql'

export { isLightTheme } from './theme'

export interface SourcegraphContext {
@@ -14,14 +12,13 @@ export interface SourcegraphContext {
    user: Readable<AuthenticatedUser | null>
    temporarySettingsStorage: Readable<TemporarySettingsStorage>
    featureFlags: Readable<FeatureFlag[]>
    client: Readable<GraphQLClient>
}

export const KEY = '__sourcegraph__'

export function getStores(): SourcegraphContext {
    const { settings, user, temporarySettingsStorage, featureFlags, client } = getContext<SourcegraphContext>(KEY)
    return { settings, user, temporarySettingsStorage, featureFlags, client }
    const { settings, user, temporarySettingsStorage, featureFlags } = getContext<SourcegraphContext>(KEY)
    return { settings, user, temporarySettingsStorage, featureFlags }
}

export const user = {
@@ -38,13 +35,6 @@ export const settings = {
    },
}

export const graphqlClient = {
    subscribe(subscriber: (client: GraphQLClient) => void) {
        const { client } = getStores()
        return client.subscribe(subscriber)
    },
}

/**
 * A store that updates every second to return the current time.
 */
@@ -12,96 +12,122 @@ afterAll(() => {

describe('createPromiseStore', () => {
    describe('initial promise', () => {
        it('correctly updates each store for resolved initial promises', async () => {
            const { pending, value, error, set } = createPromiseStore<number>()
            set(Promise.resolve(1))
        it('correctly updates store for resolved initial promises', async () => {
            const store = createPromiseStore<number>()
            store.set(Promise.resolve(1))

            expect(get(pending)).toBe(true)
            expect(get(value)).toBe(null)
            expect(get(error)).toBe(null)
            expect(get(store)).toMatchObject({
                pending: true,
                value: null,
                error: null,
            })

            await vi.runOnlyPendingTimersAsync()

            expect(get(pending)).toBe(false)
            expect(get(value)).toBe(1)
            expect(get(error)).toBe(null)
            expect(get(store)).toMatchObject({
                pending: false,
                value: 1,
                error: null,
            })
        })

        it('correctly updates each store for rejected initial promises', async () => {
            const { pending, value, error, set } = createPromiseStore<number>()
            set(Promise.reject(1))
            const store = createPromiseStore<number>()
            store.set(Promise.reject(1))

            expect(get(pending)).toBe(true)
            expect(get(value)).toBe(null)
            expect(get(error)).toBe(null)
            expect(get(store)).toMatchObject({
                pending: true,
                value: null,
                error: null,
            })

            await vi.runOnlyPendingTimersAsync()

            expect(get(pending)).toBe(false)
            expect(get(value)).toBe(null)
            expect(get(error)).toBe(1)
            expect(get(store)).toMatchObject({
                pending: false,
                value: null,
                error: 1,
            })
        })
    })

    describe('updates', () => {
        it('updates the store values when a new promise is set', async () => {
            const { pending, value, error, set } = createPromiseStore<number>()
            set(Promise.resolve(1))
        it('updates the store when a new promise is set', async () => {
            const store = createPromiseStore<number>()
            store.set(Promise.resolve(1))
            await vi.runOnlyPendingTimersAsync()
            expect(get(pending)).toBe(false)
            expect(get(store).pending).toBe(false)

            set(Promise.reject(2))
            expect(get(pending)).toBe(true)
            store.set(Promise.reject(2))
            expect(get(store).pending).toBe(true)

            await vi.runOnlyPendingTimersAsync()

            expect(get(pending)).toBe(false)
            expect(get(value)).toBe(null)
            expect(get(error)).toBe(2)
            expect(get(store)).toMatchObject({
                pending: false,
                value: null,
                error: 2,
            })

            set(Promise.resolve(3))
            expect(get(pending)).toBe(true)
            store.set(Promise.resolve(3))
            expect(get(store).pending).toBe(true)

            await vi.runOnlyPendingTimersAsync()

            expect(get(pending)).toBe(false)
            expect(get(value)).toBe(3)
            expect(get(error)).toBe(null)
            expect(get(store)).toMatchObject({
                pending: false,
                value: 3,
                error: null,
            })
        })

        it('updates the store with the latest resolved promise', async () => {
            const { pending, value, set } = createPromiseStore<number>()
            set(Promise.resolve(1))
            set(Promise.resolve(2))
            const store = createPromiseStore<number>()
            store.set(Promise.resolve(1))
            store.set(Promise.resolve(2))

            await vi.runOnlyPendingTimersAsync()

            expect(get(pending)).toBe(false)
            expect(get(value)).toBe(2)
            expect(get(store)).toMatchObject({
                pending: false,
                value: 2,
                error: null,
            })
        })

        it('retains the old value while a new promise is resolved', async () => {
            const { pending, value, latestValue, set } = createPromiseStore<number>()
            set(Promise.resolve(1))
            const store = createPromiseStore<number>()
            store.set(Promise.resolve(1))
            await vi.runOnlyPendingTimersAsync()

            set(Promise.resolve(2))
            store.set(Promise.resolve(2))

            expect(get(pending)).toBe(true)
            expect(get(value)).toBe(null)
            expect(get(latestValue)).toBe(1)
            expect(get(store)).toMatchObject({
                pending: true,
                value: 1,
                error: null,
            })
        })

        it('retains the old error while a new promise is resolved', async () => {
            const { pending, error, latestError, set } = createPromiseStore<number>()
            set(Promise.reject(1))
            const store = createPromiseStore<number>()
            store.set(Promise.reject(1))
            await vi.runOnlyPendingTimersAsync()

            set(Promise.resolve(2))
            store.set(Promise.resolve(2))

            expect(get(pending)).toBe(true)
            expect(get(error)).toBe(null)
            expect(get(latestError)).toBe(1)
            expect(get(store)).toMatchObject({
                pending: true,
                value: null,
                error: 1,
            })

            await vi.runOnlyPendingTimersAsync()
            expect(get(store)).toMatchObject({
                pending: false,
                value: 2,
                error: null,
            })
        })
    })
})
@@ -1,75 +1,70 @@
import { type Readable, writable, readonly, derived } from 'svelte/store'
import { type Readable, writable, readonly } from 'svelte/store'

interface PromiseStore<D, E = Error> {
    /**
     * True when the promise is pending, false otherwise.
     * Initial value: false
     */
    pending: Readable<boolean>
    /**
     * The current value or null if the current promise was rejected or is pending.
     * Initial value: null
     */
    value: Readable<D | null>
    /**
     * The current error or null if the current promise was resolved or is pending.
     * Initial value: null
     */
    error: Readable<E | null>
    /**
     * The value of the latest settled promise. While a new promise is pending this will contain
     * the value of the previously settled promise (or null if the promise was rejected).
     * Initial value: null
     */
    latestValue: Readable<D | null>
    /**
     * The error of the latest settled promise. While a new promise is pending this will contain
     * the error of the previously settled promise (or null if the promise was resolved).
     * Initial value: null
     */
    latestError: Readable<E | null>
/**
 * Successful result of a promise.
 */
interface ResultSuccess<T> {
    value: T
    error: null
    pending: false
}

/**
 * Rejected result of a promise.
 */
interface ResultError<E> {
    value: null
    error: E
    pending: false
}

/**
 * Pending result of a promise. `value` and `error` can contain the
 * latest resolved value or rejection error.
 */
interface ResultPending<T, E> {
    value: T | null
    error: E | null
    pending: true
}

type Result<T, E = Error> = ResultSuccess<T> | ResultError<E> | ResultPending<T, E>

interface PromiseStore<D, E = Error> extends Readable<Result<D | null, E>> {
    /**
     * Sets the passed promise as the current promise and tracks its status.
     * Does nothing if the same promise as the current one is passed. The argument
     * is optional to make it easier to work with optional data coming from loaders.
     */
    set: (promise?: Promise<D> | null) => void
    set: (promise?: PromiseLike<D> | null) => void
}

/**
 * Returns a store to track the promise's state, resolved value and rejection error.
 * The store ensures that `value` is updated with the latest resolved promise.
 */
export function createPromiseStore<D, E = Error>(): PromiseStore<Awaited<D>, E> {
    let currentPromise: Promise<Awaited<D>> | null | undefined
export function createPromiseStore<D, E = Error>(): PromiseStore<D, E> {
    let currentPromise: PromiseLike<D> | null | undefined

    const pending = writable<boolean>(false)
    const value = writable<Awaited<D> | null>(null)
    const error = writable<E | null>(null)
    const resultStore = writable<Result<D | null, E>>({ value: null, error: null, pending: true })

    function resolve(promise?: Promise<Awaited<D>> | null) {
    function resolve(promise?: PromiseLike<D> | null) {
        currentPromise = promise
        if (!promise) {
            value.set(null)
            error.set(null)
            pending.set(false)
            resultStore.set({ value: null, error: null, pending: false })
            return
        }

        pending.set(true)
        resultStore.update($result => ({ ...$result, pending: true }))
        promise.then(
            result => {
                if (currentPromise === promise) {
                    value.set(result)
                    error.set(null)
                    pending.set(false)
                    resultStore.update(() => ({ value: result, error: null, pending: false }))
                }
            },
            error_ => {
            error => {
                if (currentPromise === promise) {
                    value.set(null)
                    error.set(error_)
                    pending.set(false)
                    resultStore.update(() => ({ value: null, error: error, pending: false }))
                }
            }
        )
@@ -78,11 +73,7 @@ export function createPromiseStore<D, E = Error>(): PromiseStore<Awaited<D>, E>
    resolve(currentPromise)

    return {
        pending: readonly(pending),
        value: derived([pending, value], ([$pending, $value]) => ($pending ? null : $value)),
        error: derived([pending, error], ([$pending, $error]) => ($pending ? null : $error)),
        latestValue: readonly(value),
        latestError: readonly(error),
        ...readonly(resultStore),
        set: promise => {
            if (promise !== currentPromise) {
                resolve(promise)
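A hedged sketch of consuming the consolidated store — one subscription now covers `pending`, `value`, and `error` (the data names are illustrative):

    // Hypothetical usage in a component's script block:
    const blob = createPromiseStore<BlobData>()
    $: blob.set(data.blobPromise) // promise handed in by the route loader

    // In the template, a single subscription covers all three states:
    // {#if $blob.pending} ...spinner... {:else if $blob.error} <Alert variant="danger">...</Alert> {:else} ...render $blob.value... {/if}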
client/web-sveltekit/src/lib/wildcard/Alert.stories.svelte (new file, 13 lines)
@@ -0,0 +1,13 @@
<script lang="ts" context="module">
    import Alert from './Alert.svelte'
    import { Story } from '@storybook/addon-svelte-csf'

    export const meta = {
        component: Alert,
    }
</script>

<Story name="Default">
    <Alert variant="info">This is an info alert</Alert>
    <Alert variant="danger">This is a danger alert</Alert>
</Story>
client/web-sveltekit/src/lib/wildcard/Alert.svelte (new file, 90 lines)
@@ -0,0 +1,90 @@
<script lang="ts">
    export let variant: 'info' | 'danger'
</script>

<div class:danger={variant === 'danger'} class:info={variant === 'info'}>
    <slot />
</div>

<style lang="scss">
    div {
        --alert-icon-display: block;
        --alert-icon-block-width: 2.5rem;
        --alert-content-padding: 0.5rem;
        --alert-background-color: var(--color-bg-1);

        overflow: hidden;
        position: relative;
        margin-bottom: 1rem;
        color: var(--body-color);
        border-radius: var(--border-radius);
        border: 1px solid var(--alert-border-color);

        background-color: var(--alert-background-color);
        padding: var(--alert-content-padding) var(--alert-content-padding) var(--alert-content-padding)
            calc(var(--alert-icon-block-width) + var(--alert-content-padding));

        &::before,
        &::after {
            display: var(--alert-icon-display);
            content: '';
            position: absolute;
            top: 0;
            left: 0;
            width: var(--alert-icon-block-width);
            height: 100%;
        }

        /* Alert icon background. */
        &::before {
            border: 2px solid var(--color-bg-1);
            border-top-left-radius: var(--border-radius);
            border-bottom-left-radius: var(--border-radius);
            background-color: var(--alert-icon-background-color);
        }

        &::after {
            mask-repeat: no-repeat;
            mask-size: 1rem;
            mask-position: 50% 50%;

            /* Applied as a fill color for the SVG icon because of the mask-image. */
            background-color: var(--alert-icon-color);
        }
    }

    .danger {
        --alert-border-color: var(--danger);
        --alert-icon-background-color: var(--danger-4);

        :global(.theme-light) & {
            --alert-icon-color: var(--danger-3);
        }

        :global(.theme-dark) & {
            --alert-icon-color: var(--danger);
        }

        &::after {
            /* Icon: mdi/AlertCircle */
            mask-image: url("data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M13 13h-2V7h2m0 10h-2v-2h2M12 2A10 10 0 002 12a10 10 0 0010 10 10 10 0 0010-10A10 10 0 0012 2z'/></svg>");
        }
    }

    .info {
        --alert-border-color: var(--primary);
        --alert-icon-background-color: var(--primary-4);

        :global(.theme-light) & {
            --alert-icon-color: var(--primary-3);
        }

        :global(.theme-dark) & {
            --alert-icon-color: var(--primary);
        }

        &::after {
            /* Icon: mdi/Information */
            mask-image: url("data:image/svg+xml,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M13 9h-2V7h2m0 10h-2v-6h2m-1-9A10 10 0 002 12a10 10 0 0010 10 10 10 0 0010-10A10 10 0 0012 2z'/></svg>");
        }
    }
</style>
@@ -9,3 +9,4 @@ export { default as MenuSeparator } from './menu/MenuSeparator.svelte'
export { default as Submenu } from './menu/Submenu.svelte'
export { default as MenuRadioGroup } from './menu/MenuRadioGroup.svelte'
export { getFileIconInfo } from '@sourcegraph/wildcard/src/components/Icon'
export { default as Alert } from './Alert.svelte'
@@ -1,6 +1,6 @@
<script lang="ts">
    import { setContext } from 'svelte'
    import { readable, writable } from 'svelte/store'
    import { writable } from 'svelte/store'

    import { browser } from '$app/environment'
    import { isErrorLike } from '$lib/common'
@@ -14,11 +14,10 @@

    import './styles.scss'

    import { beforeNavigate } from '$app/navigation'

    import type { LayoutData, Snapshot } from './$types'
    import type { LayoutData } from './$types'
    import { createFeatureFlagStore } from '$lib/featureflags'
    import InfoBanner from './InfoBanner.svelte'
    import { getGraphQLClient } from '$lib/graphql/apollo'

    export let data: LayoutData

@@ -27,7 +26,7 @@
    // It's OK to set the temporary storage during initialization time because
    // sign-in/out currently performs a full page refresh
    const temporarySettingsStorage = createTemporarySettingsStorage(
        data.user ? new TemporarySettingsStorage(data.graphqlClient, true) : undefined
        data.user ? new TemporarySettingsStorage(getGraphQLClient(), true) : undefined
    )

    setContext<SourcegraphContext>(KEY, {
@@ -35,7 +34,6 @@
        settings,
        temporarySettingsStorage,
        featureFlags: createFeatureFlagStore(data.featureFlags, data.fetchEvaluatedFeatureFlags),
        client: readable(data.graphqlClient),
    })

    // Update stores when data changes
@@ -54,35 +52,6 @@
        document.documentElement.classList.toggle('theme-light', $isLightTheme)
        document.documentElement.classList.toggle('theme-dark', !$isLightTheme)
    }

    let main: HTMLElement | null = null
    let scrollTop = 0
    beforeNavigate(() => {
        // It looks like `snapshot.capture` is called "too late", i.e. after the
        // content has been updated. beforeNavigate is used to capture the correct
        // scroll offset
        scrollTop = main?.scrollTop ?? 0
    })
    export const snapshot: Snapshot<{ x: number }> = {
        capture() {
            return { x: scrollTop }
        },
        restore(value) {
            restoreScrollPosition(value.x)
        },
    }

    function restoreScrollPosition(y: number) {
        const start = Date.now()
        requestAnimationFrame(function scroll() {
            if (main) {
                main.scrollTo(0, y)
            }
            if ((!main || main.scrollTop !== y) && Date.now() - start < 3000) {
                requestAnimationFrame(scroll)
            }
        })
    }
</script>

<svelte:head>
@@ -95,7 +64,7 @@
<InfoBanner />
<Header authenticatedUser={$user} />

<main bind:this={main}>
<main>
    <slot />
</main>
@ -23,9 +23,12 @@ if (browser) {
|
||||
}
|
||||
}
|
||||
|
||||
export const load: LayoutLoad = async () => {
|
||||
const graphqlClient = await getGraphQLClient()
|
||||
const result = await graphqlClient.query({ query: Init, fetchPolicy: 'no-cache' })
|
||||
export const load: LayoutLoad = async ({ fetch }) => {
|
||||
const client = getGraphQLClient()
|
||||
const result = await client.query(Init, {}, { fetch, requestPolicy: 'network-only' })
|
||||
if (!result.data || result.error) {
|
||||
error(500, `Failed to initialize app: ${result.error}`)
|
||||
}
|
||||
|
||||
const settings = parseJSONCOrError<Settings>(result.data.viewerSettings.final)
|
||||
if (isErrorLike(settings)) {
|
||||
@ -33,13 +36,15 @@ export const load: LayoutLoad = async () => {
|
||||
}
|
||||
|
||||
return {
|
||||
graphqlClient,
|
||||
user: result.data.currentUser,
|
||||
// Initial user settings
|
||||
settings,
|
||||
featureFlags: result.data.evaluatedFeatureFlags,
|
||||
fetchEvaluatedFeatureFlags: async () => {
|
||||
const result = await graphqlClient.query({ query: EvaluatedFeatureFlagsQuery, fetchPolicy: 'no-cache' })
|
||||
const result = await client.query(EvaluatedFeatureFlagsQuery, {}, { requestPolicy: 'network-only', fetch })
|
||||
if (!result.data || result.error) {
|
||||
throw new Error(`Failed to fetch evaluated feature flags: ${result.error}`)
|
||||
}
|
||||
return result.data.evaluatedFeatureFlags
|
||||
},
|
||||
}
|
||||
|
||||
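Note that `getGraphQLClient()` is now called without `await`, so the new `$lib/graphql` entry point presumably constructs the urql client synchronously as a lazy singleton. A minimal sketch of that idea, assuming `@urql/core` and Sourcegraph's `/.api/graphql` endpoint; the real exchanges and endpoint in `$lib/graphql` may differ:

import { Client, cacheExchange, fetchExchange } from '@urql/core'

let client: Client | undefined

// Hypothetical sketch: create the client on first use instead of awaiting a promise.
export function getGraphQLClient(): Client {
    if (!client) {
        client = new Client({
            url: '/.api/graphql',
            // Document caching: whole request/response pairs are cached;
            // there is no normalized cache shared across queries.
            exchanges: [cacheExchange, fetchExchange],
        })
    }
    return client
}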
@ -4,7 +4,6 @@
    import { afterNavigate, disableScrollHandling, goto } from '$app/navigation'
    import { page } from '$app/stores'
    import LoadingSpinner from '$lib/LoadingSpinner.svelte'
    import { fetchSidebarFileTree, FileTreeProvider, type FileTreeLoader } from '$lib/repo/api/tree'
    import HistoryPanel, { type Capture as HistoryCapture } from '$lib/repo/HistoryPanel.svelte'
    import SidebarToggleButton from '$lib/repo/SidebarToggleButton.svelte'
    import { sidebarOpen } from '$lib/repo/stores'
@ -16,6 +15,10 @@
    import type { GitHistory_HistoryConnection } from './layout.gql'
    import Tabs from '$lib/Tabs.svelte'
    import TabPanel from '$lib/TabPanel.svelte'
    import { createFileTreeStore } from './fileTreeStore'
    import { isErrorLike } from '$lib/common'
    import { Alert } from '$lib/wildcard'
    import { fetchSidebarFileTree } from '$lib/repo/api/tree'

    interface Capture {
        selectedTab: number | null
@ -49,49 +52,6 @@
        },
    }

    const fileTreeLoader: FileTreeLoader = args =>
        fetchSidebarFileTree(args).then(
            ({ root, values }) =>
                new FileTreeProvider({
                    root,
                    values,
                    loader: fileTreeLoader,
                    ...args,
                })
        )

    async function updateFileTreeProvider(repoName: string, revision: string, parentPath: string) {
        const result = await data.fileTree
        if (!result) {
            treeProvider = null
            return
        }
        const { root, values } = result

        // Do nothing if update was called with new arguments in the meantime
        if (repoName !== data.repoName || revision !== (data.revision ?? '') || parentPath !== data.parentPath) {
            return
        }
        treeProvider = new FileTreeProvider({
            root,
            values,
            repoName,
            revision,
            loader: fileTreeLoader,
        })
    }

    function fetchCommitHistory(afterCursor: string | null) {
        // Only fetch more commits if there are more commits and if we are not already
        // fetching more commits.
        if ($commitHistoryQuery && !$commitHistoryQuery.loading && commitHistory?.pageInfo?.hasNextPage) {
            data.commitHistory.fetchMore({
                variables: {
                    afterCursor: afterCursor,
                },
            })
        }
    }
    async function selectTab(event: { detail: number | null }) {
        if (event.detail === null) {
            const url = new URL($page.url)
@ -101,18 +61,16 @@
        selectedTab = event.detail
    }

    let treeProvider: FileTreeProvider | null = null
    const fileTreeStore = createFileTreeStore({ fetchFileTreeData: fetchSidebarFileTree })
    let selectedTab: number | null = null
    let historyPanel: HistoryPanel
    let rootElement: HTMLElement | null = null

    $: ({ revision = '', parentPath, repoName } = data)
    // Only update the file tree provider (which causes the tree to rerender) when repo, revision/commit or file path
    // update
    $: updateFileTreeProvider(repoName, revision, parentPath)
    $: commitHistoryQuery = data.commitHistory
    let commitHistory: GitHistory_HistoryConnection | null
    $: if (commitHistoryQuery) {

    $: ({ revision = '', parentPath, repoName, resolvedRevision } = data)
    $: fileTreeStore.set({ repoName, revision: resolvedRevision.commitID, path: parentPath })
    $: commitHistoryQuery = data.commitHistory
    $: if (!!commitHistoryQuery) {
        // Reset commit history when the query observable changes. Without
        // this we would keep showing the commit history of the previously selected
        // file/folder until the new commit history is loaded.
@ -157,8 +115,15 @@
    <h3>
        <SidebarToggleButton /> Files
    </h3>
    {#if treeProvider}
        <FileTree revision={revision ?? ''} {treeProvider} selectedPath={$page.params.path ?? ''} />
    {#if $fileTreeStore}
        {#if isErrorLike($fileTreeStore)}
            <Alert variant="danger">
                Unable to fetch file tree data:
                {$fileTreeStore.message}
            </Alert>
        {:else}
            <FileTree {repoName} {revision} treeProvider={$fileTreeStore} selectedPath={$page.params.path ?? ''} />
        {/if}
    {:else}
        <LoadingSpinner center={false} />
    {/if}
@ -175,8 +140,8 @@
    <HistoryPanel
        bind:this={historyPanel}
        history={commitHistory}
        loading={$commitHistoryQuery?.loading ?? true}
        fetchMore={fetchCommitHistory}
        loading={$commitHistoryQuery?.fetching ?? true}
        fetchMore={commitHistoryQuery.fetchMore}
        enableInlineDiffs={$page.route.id?.includes('/blob/') ?? false}
    />
{/key}

@ -1,8 +1,10 @@
import { dirname } from 'path'

import { browser } from '$app/environment'
import { getGraphQLClient } from '$lib/graphql'
import { from } from 'rxjs'

import { getGraphQLClient, infinityQuery } from '$lib/graphql'
import { fetchSidebarFileTree } from '$lib/repo/api/tree'
import { resolveRevision } from '$lib/repo/utils'
import { parseRepoRevision } from '$lib/shared'

import type { LayoutLoad } from './$types'
@ -10,54 +12,71 @@ import { GitHistoryQuery } from './layout.gql'

const HISTORY_COMMITS_PER_PAGE = 20

// Signifies the path of the repository root
const REPO_ROOT = '.'

let getRootPath = (_repo: string, path: string) => path

// We keep state in the browser to load the tree entries of the "highest" directory that was visited.
if (browser) {
    const topTreePath: Record<string, string> = {}

    getRootPath = (repo: string, path: string) => {
        const treePath = topTreePath[repo]
        if (treePath && (treePath === REPO_ROOT || path.startsWith(treePath))) {
            return topTreePath[repo]
        }
        return (topTreePath[repo] = path)
    }
}

export const load: LayoutLoad = async ({ parent, params }) => {
    const client = await getGraphQLClient()
export const load: LayoutLoad = ({ parent, params }) => {
    const client = getGraphQLClient()
    const { repoName, revision = '' } = parseRepoRevision(params.repo)
    const parentPath = getRootPath(repoName, params.path ? dirname(params.path) : REPO_ROOT)
    const parentPath = params.path ? dirname(params.path) : ''
    const resolvedRevision = resolveRevision(parent, revision)

    // Fetches the most recent commits for current blob, tree or repo root
    const commitHistory = client.watchQuery({
        query: GitHistoryQuery,
        variables: {
            repoName,
            revspec: revision,
            filePath: params.path ?? '',
            first: HISTORY_COMMITS_PER_PAGE,
            afterCursor: null,
        },
        notifyOnNetworkStatusChange: true,
    })
    if (!client.readQuery({ query: GitHistoryQuery, variables: commitHistory.variables })) {
        // Eagerly fetch data if it isn't in the cache already. This ensures that the data is fetched
        // as soon as possible, not only after the layout subscribes to the query.
        commitHistory.refetch()
    }
    // Prefetch the sidebar file tree for the parent path
    // (we don't wait for the file tree component to trigger the query).
    // This is also used by the page to find the readme file.
    const fileTree = resolvedRevision
        .then(revision =>
            fetchSidebarFileTree({
                repoName,
                revision,
                filePath: parentPath,
            })
        )
        .catch(() => null)

    return {
        parentPath,
        commitHistory,
        fileTree: fetchSidebarFileTree({
            repoName,
            revision,
            filePath: parentPath,
        fileTree,
        // Fetches the most recent commits for current blob, tree or repo root
        commitHistory: infinityQuery({
            client,
            query: GitHistoryQuery,
            variables: from(
                resolvedRevision.then(revspec => ({
                    repoName,
                    revspec,
                    filePath: params.path ?? '',
                    first: HISTORY_COMMITS_PER_PAGE,
                    afterCursor: null as string | null,
                }))
            ),
            nextVariables: previousResult => {
                if (previousResult?.data?.repository?.commit?.ancestors?.pageInfo?.hasNextPage) {
                    return {
                        afterCursor: previousResult.data.repository.commit.ancestors.pageInfo.endCursor,
                    }
                }
                return undefined
            },
            combine: (previousResult, nextResult) => {
                if (!nextResult.data?.repository?.commit) {
                    return nextResult
                }
                const previousNodes = previousResult.data?.repository?.commit?.ancestors?.nodes ?? []
                const nextNodes = nextResult.data.repository?.commit?.ancestors.nodes ?? []
                return {
                    ...nextResult,
                    data: {
                        repository: {
                            ...nextResult.data.repository,
                            commit: {
                                ...nextResult.data.repository.commit,
                                ancestors: {
                                    ...nextResult.data.repository.commit.ancestors,
                                    nodes: [...previousNodes, ...nextNodes],
                                },
                            },
                        },
                    },
                }
            },
        }),
    }
}
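For reference, the call above together with its consumers (`$commitHistoryQuery?.fetching`, `commitHistoryQuery.fetchMore`) implies roughly the following shape for the `infinityQuery` helper. This is an inferred sketch from the call sites in this diff, not the actual definition in `$lib/graphql`:

import type { Observable } from 'rxjs'
import type { Readable } from 'svelte/store'

// Inferred result shape: the latest combined GraphQL result plus a fetching flag.
interface InfinityQueryResult<TData> {
    data?: TData
    error?: Error
    fetching: boolean
}

// Inferred store shape: a Svelte-readable store with an imperative fetchMore().
interface InfinityQueryStore<TData> extends Readable<InfinityQueryResult<TData>> {
    fetchMore(): void
}

interface InfinityQueryOptions<TData, TVariables> {
    client: unknown // the urql client returned by getGraphQLClient()
    query: unknown // a GraphQL document
    // Variables arrive asynchronously (e.g. after the revision is resolved).
    variables: Observable<TVariables>
    // Returns the variables for the next page, or undefined when there is none.
    nextVariables(previous: InfinityQueryResult<TData> | undefined): Partial<TVariables> | undefined
    // Merges already loaded pages with the next response (e.g. concatenating nodes).
    combine(previous: InfinityQueryResult<TData>, next: InfinityQueryResult<TData>): InfinityQueryResult<TData>
}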
@ -12,26 +12,26 @@

    export let data: PageData

    const { value: readme, set: setReadme, pending: readmePending } = createPromiseStore<RepoPage_Readme | null>()
    $: setReadme(data.readme)
    const readme = createPromiseStore<RepoPage_Readme | null>()
    $: readme.set(data.readme)
</script>

<h3 class="header">
    <div class="sidebar-button" class:hidden={$sidebarOpen}>
        <SidebarToggleButton />
    </div>
    {#if $readme}
    {#if $readme.value}
        <Icon svgPath={mdiFileDocumentOutline} />

        {$readme.name}
    {:else if !$readmePending}
        {$readme.value.name}
    {:else if !$readme.pending}
        Description
    {/if}
</h3>
<div class="content">
    {#if $readme}
        <Readme file={$readme} />
    {:else if !$readmePending}
    {#if $readme.value}
        <Readme file={$readme.value} />
    {:else if !$readme.pending}
        {data.resolvedRevision.repo.description}
    {/if}
</div>
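The consolidated `createPromiseStore` API used above (one store exposing `value` and `pending`, plus a `set(promise)` method) could plausibly look like the following. This is a hedged sketch based on the usages in this diff; the real helper in `$lib/utils` may differ:

import { writable, type Readable } from 'svelte/store'

interface PromiseState<T> {
    pending: boolean
    value: T | undefined
    error: Error | undefined
}

// Hypothetical sketch: track the latest promise passed to set(), keeping the
// previous value visible while the next promise is pending.
export function createPromiseStore<T>(): Readable<PromiseState<T>> & { set(promise: Promise<T>): void } {
    const { subscribe, update } = writable<PromiseState<T>>({ pending: true, value: undefined, error: undefined })
    let latest: Promise<T> | undefined

    return {
        subscribe,
        set(promise) {
            latest = promise
            update(state => ({ ...state, pending: true }))
            promise.then(
                value => {
                    // Ignore results from promises that have been superseded.
                    if (latest === promise) {
                        update(() => ({ pending: false, value, error: undefined }))
                    }
                },
                error => {
                    if (latest === promise) {
                        update(state => ({ ...state, pending: false, error }))
                    }
                }
            )
        },
    }
}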
@ -1,36 +1,30 @@
import { getGraphQLClient, mapOrThrow } from '$lib/graphql'
import { findReadme } from '$lib/repo/tree'

import type { PageLoad } from './$types'
import { RepoPageReadmeQuery } from './page.gql'

export const load: PageLoad = async ({ parent }) => {
    const { resolvedRevision, graphqlClient, fileTree } = await parent()

export const load: PageLoad = ({ parent }) => {
    return {
        readme: fileTree.then(result => {
            const readme = findReadme(result.root.entries)
            if (!readme) {
                return null
            }
            return graphqlClient
                .query({
                    query: RepoPageReadmeQuery,
                    variables: {
                        repoID: resolvedRevision.repo.id,
                        revspec: resolvedRevision.commitID,
                        path: readme.path,
                    },
                })
        readme: parent().then(({ resolvedRevision, fileTree, repoName }) =>
            fileTree
                .then(result => {
                    if (result.data.node?.__typename !== 'Repository') {
                        // This page will never render when the repository is not found.
                        // The (validrev) data loader will render an error page instead.
                        // Still, this error will show up as an unhandled promise rejection
                        // in the console. We should find a better way to handle this.
                        throw new Error('Expected Repository')
                    if (!result) {
                        return null
                    }
                    return result.data.node.commit?.blob ?? null
                    const readme = findReadme(result.root.entries)
                    if (!readme) {
                        return null
                    }
                    return getGraphQLClient()
                        .query(RepoPageReadmeQuery, {
                            repoName,
                            revspec: resolvedRevision.commitID,
                            path: readme.path,
                        })
                        .then(mapOrThrow(result => result.data?.repository?.commit?.blob ?? null))
                })
        }),
                .catch(() => null)
        ),
    }
}
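The new `mapOrThrow` helper appears throughout the rewritten loaders. Judging by its usage (`.then(mapOrThrow(result => ...))`), it most likely turns a GraphQL error into a promise rejection before mapping the result. A hypothetical sketch, assuming urql's `OperationResult`; the actual implementation in `$lib/graphql` may differ:

import type { OperationResult } from '@urql/core'

// Hypothetical sketch: surface GraphQL/network errors as rejections instead of
// silently returning partial data, then map the successful result.
export function mapOrThrow<TData, TResult>(
    map: (result: OperationResult<TData>) => TResult
): (result: OperationResult<TData>) => TResult {
    return result => {
        if (result.error) {
            throw result.error
        }
        return map(result)
    }
}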
@ -1,7 +1,7 @@
<svelte:options immutable />

<script lang="ts">
    import { mdiCodeBracesBox, mdiFileCodeOutline } from '@mdi/js'
    import { mdiCodeBracesBox, mdiFileCodeOutline, mdiMapSearch } from '@mdi/js'

    import { page } from '$app/stores'
    import CodeMirrorBlob from '$lib/CodeMirrorBlob.svelte'
@ -20,13 +20,12 @@
    import { updateSearchParamsWithLineInformation, createBlobDataHandler } from '$lib/repo/blob'
    import { isErrorLike, type LineOrPositionOrRange } from '$lib/common'
    import { from } from 'rxjs'
    import { gql } from '$lib/graphql'
    import { toGraphQLResult } from '$lib/graphql'
    import { Alert } from '$lib/wildcard'

    export let data: PageData

    // We use the latest value here because we want to keep showing the old document while loading
    // the new one.
    const { loading, combinedBlobData, set: setBlobData } = createBlobDataHandler()
    const combinedBlobData = createBlobDataHandler()
    let selectedPosition: LineOrPositionOrRange | null = null

    $: ({
@ -35,19 +34,22 @@
        repoName,
        filePath,
        settings,
        graphqlClient,
        graphQLClient,
    } = data)
    $: setBlobData(data.blob, data.highlights)
    $: ({ blob, highlights = '' } = $combinedBlobData)
    $: combinedBlobData.set(data.blob, data.highlights)
    $: ({ blob, highlights, blobPending } = $combinedBlobData)
    $: formatted = !!blob?.richHTML
    $: fileNotFound = !blob && !blobPending
    $: fileLoadingError = (!blobPending && !blob && $combinedBlobData.blobError) || null
    $: showRaw = $page.url.searchParams.get('view') === 'raw'
    $: codeIntelAPI = createCodeIntelAPI({
        settings: setting => (isErrorLike(settings.final) ? undefined : settings.final?.[setting]),
        settings: setting => (isErrorLike(settings?.final) ? undefined : settings?.final?.[setting]),
        requestGraphQL(options) {
            return from(graphqlClient.query({ query: gql(options.request), variables: options.variables }))
            return from(graphQLClient.query(options.request, options.variables).then(toGraphQLResult))
        },
    })
    $: if (!$loading) {
    $: if (!blobPending) {
        // Update selected position as soon as blob is loaded
        selectedPosition = parseQueryAndHash($page.url.search, $page.url.hash)
    }
</script>
@ -73,7 +75,12 @@
    </svelte:fragment>
</FileHeader>

<div class="content" class:loading={$loading} class:compare={!!data.compare}>
<div class="content" class:loading={blobPending} class:compare={!!data.compare} class:fileNotFound>
    {#if !$combinedBlobData.highlightsPending && $combinedBlobData.highlightsError}
        <Alert variant="danger">
            Unable to load syntax highlighting: {$combinedBlobData.highlightsError.message}
        </Alert>
    {/if}
    {#if data.compare}
        {#await data.compare.diff}
            <LoadingSpinner />
@ -107,19 +114,38 @@
            {codeIntelAPI}
        />
    {/if}
    {:else if !blobPending}
        {#if fileLoadingError}
            <Alert variant="danger">
                Unable to load file data: {fileLoadingError.message}
            </Alert>
        {:else if fileNotFound}
            <div class="circle">
                <Icon svgPath={mdiMapSearch} size={80} />
            </div>
            <h2>File not found</h2>
        {/if}
    {/if}
</div>

<style lang="scss">
    .content {
        display: flex;
        flex-direction: column;
        overflow-x: auto;
        flex: 1;

        &.compare {
            flex-direction: column;
        }

        &.fileNotFound {
            background-color: var(--body-bg);
            flex-direction: column;
            align-items: center;
        }
    }

    .loading {
        filter: blur(1px);
    }
@ -128,4 +154,11 @@
        padding: 1rem;
        overflow: auto;
    }

    .circle {
        background-color: var(--color-bg-2);
        border-radius: 50%;
        padding: 1.5rem;
        margin: 1rem;
    }
</style>
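The `toGraphQLResult` adapter used in `requestGraphQL` above presumably bridges urql's result shape and the `{ data, errors }` shape the shared code-intel API expects. A hypothetical sketch, assuming urql's `CombinedError`; the real helper may differ:

import type { OperationResult } from '@urql/core'

interface GraphQLResult<TData> {
    data: TData | null
    errors?: readonly { message: string }[]
}

// Hypothetical sketch: adapt a urql OperationResult to the legacy shape.
export function toGraphQLResult<TData>(result: OperationResult<TData>): GraphQLResult<TData> {
    return {
        data: result.data ?? null,
        errors: result.error?.graphQLErrors.map(error => ({ message: error.message })),
    }
}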
@ -1,61 +1,48 @@
import { getGraphQLClient } from '$lib/graphql'
import { getGraphQLClient, mapOrThrow } from '$lib/graphql'
import { resolveRevision } from '$lib/repo/utils'
import { parseRepoRevision } from '$lib/shared'

import type { PageLoad } from './$types'
import { BlobDiffQuery, BlobPageQuery, BlobSyntaxHighlightQuery } from './page.gql'

export const load: PageLoad = async ({ parent, params, url }) => {
export const load: PageLoad = ({ parent, params, url }) => {
    const revisionToCompare = url.searchParams.get('rev')
    const graphqlClient = await getGraphQLClient()
    const client = getGraphQLClient()
    const { repoName, revision = '' } = parseRepoRevision(params.repo)
    const resolvedRevision = await resolveRevision(parent, revision)
    const resolvedRevision = resolveRevision(parent, revision)

    return {
        graphQLClient: client,
        filePath: params.path,
        blob: graphqlClient
            .query({
                query: BlobPageQuery,
                variables: {
        blob: resolvedRevision
            .then(resolvedRevision =>
                client.query(BlobPageQuery, {
                    repoName,
                    revspec: resolvedRevision,
                    path: params.path,
                },
            })
            .then(result => {
                if (!result.data.repository?.commit) {
                    throw new Error('Repository not found')
                }
                return result.data.repository.commit.blob
            }),
        highlights: graphqlClient
            .query({
                query: BlobSyntaxHighlightQuery,
                variables: {
                })
            )
            .then(mapOrThrow(result => result.data?.repository?.commit?.blob ?? null)),
        highlights: resolvedRevision
            .then(resolvedRevision =>
                client.query(BlobSyntaxHighlightQuery, {
                    repoName,
                    revspec: resolvedRevision,
                    path: params.path,
                    disableTimeout: false,
                },
            })
            .then(result => {
                return result.data.repository?.commit?.blob?.highlight.lsif
            }),
                })
            )
            .then(mapOrThrow(result => result.data?.repository?.commit?.blob?.highlight.lsif ?? '')),
        compare: revisionToCompare
            ? {
                  revisionToCompare,
                  diff: graphqlClient
                      .query({
                          query: BlobDiffQuery,
                          variables: {
                              repoName,
                              revspec: revisionToCompare,
                              paths: [params.path],
                          },
                  diff: client
                      .query(BlobDiffQuery, {
                          repoName,
                          revspec: revisionToCompare,
                          paths: [params.path],
                      })
                      .then(result => {
                          return result.data.repository?.commit?.diff.fileDiffs.nodes[0]
                      }),
                      .then(mapOrThrow(result => result.data?.repository?.commit?.diff.fileDiffs.nodes[0] ?? null)),
              }
            : null,
    }

@ -0,0 +1,111 @@
import { expect, test } from '../../../../../../../testing/integration'

const repoName = 'github.com/sourcegraph/sourcegraph'
const url = `/${repoName}/-/blob/src/index.js`

test.beforeEach(({ sg }) => {
    sg.fixture([
        {
            __typename: 'Repository',
            id: '1',
            name: repoName,
            mirrorInfo: {
                cloned: true,
                cloneInProgress: false,
            },
        },
        {
            __typename: 'GitTree',
            name: 'src',
            path: 'src',
            canonicalURL: `/${repoName}/-/tree/src`,
            isDirectory: true,
            isRoot: false,
            entries: [
                {
                    canonicalURL: `/${repoName}/-/blob/src/index.js`,
                },
            ],
        },
        {
            __typename: 'GitBlob',
            name: 'index.js',
            path: 'src/index.js',
            canonicalURL: `/${repoName}/-/blob/src/index.js`,
            isDirectory: false,
            languages: ['JavaScript'],
            richHTML: '',
            content: '"file content"',
        },
    ])

    sg.mockOperations({
        ResolveRepoRevision: () => ({
            repositoryRedirect: {
                id: '1',
            },
        }),
        TreeEntries: ({}) => ({
            repository: {
                commit: {
                    tree: {
                        canonicalURL: `/${repoName}/-/tree/src`,
                    },
                },
            },
        }),
        BlobPageQuery: ({}) => ({
            repository: {
                commit: {
                    blob: {
                        canonicalURL: `/${repoName}/-/blob/src/index.js`,
                    },
                },
            },
        }),
    })
})

test('load file', async ({ page }) => {
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'index.js' })).toBeVisible()
    await expect(page.getByText(/"file content"/)).toBeVisible()
})

test('non-existent file', async ({ page, sg }) => {
    sg.mockOperations({
        BlobPageQuery: ({}) => ({
            repository: {
                commit: {
                    blob: null,
                },
            },
        }),
    })
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'index.js' })).toBeVisible()
    await expect(page.getByText('File not found')).toBeVisible()
})

test('error loading file data', async ({ page, sg }) => {
    sg.mockOperations({
        BlobPageQuery: ({}) => {
            throw new Error('Blob error')
        },
    })
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'index.js' })).toBeVisible()
    await expect(page.getByText(/Blob error/).first()).toBeVisible()
})

test('error loading highlights data', async ({ page, sg }) => {
    sg.mockOperations({
        BlobSyntaxHighlightQuery: ({}) => {
            throw new Error('Highlights error')
        },
    })
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'index.js' })).toBeVisible()
    await expect(page.getByText(/"file content"/)).toBeVisible()
    await expect(page.getByText(/Highlights error/).first()).toBeVisible()
})

@ -1,27 +1,23 @@
<script lang="ts">
    import { mdiFileDocumentOutline, mdiFolderOutline } from '@mdi/js'
    import { mdiFileDocumentOutline, mdiFolderOutline, mdiMapSearch } from '@mdi/js'

    import Icon from '$lib/Icon.svelte'
    import FileHeader from '$lib/repo/FileHeader.svelte'
    import Permalink from '$lib/repo/Permalink.svelte'
    import { createPromiseStore } from '$lib/utils'
    import type { TreePage_TreeWithCommitInfo, TreePage_Readme } from './page.gql'

    import type { PageData } from './$types'
    import FileTable from '$lib/repo/FileTable.svelte'
    import Readme from '$lib/repo/Readme.svelte'
    import LoadingSpinner from '$lib/LoadingSpinner.svelte'
    import { Alert } from '$lib/wildcard'
    import type { TreeEntryWithCommitInfo } from '$lib/repo/FileTable.gql'

    export let data: PageData

    const { value: tree, set: setTree } = createPromiseStore<PageData['treeEntries']>()
    const { value: commitInfo, set: setCommitInfo } = createPromiseStore<Promise<TreePage_TreeWithCommitInfo | null>>()
    const { value: readme, set: setReadme } = createPromiseStore<Promise<TreePage_Readme | null>>()
    const treeEntriesWithCommitInfo = createPromiseStore<TreeEntryWithCommitInfo[]>()

    $: setTree(data.treeEntries)
    $: setCommitInfo(data.commitInfo)
    $: setReadme(data.readme)
    $: entries = $tree?.entries ?? []
    $: entriesWithCommitInfo = $commitInfo?.entries ?? []
    $: treeEntriesWithCommitInfo.set(data.treeEntriesWithCommitInfo)
</script>

<svelte:head>
@ -36,17 +32,54 @@
</FileHeader>

<div class="content">
    <FileTable revision={data.revision ?? ''} {entries} commitInfo={entriesWithCommitInfo} />
    {#if $readme}
        <h4 class="header">
            <Icon svgPath={mdiFileDocumentOutline} />

            {$readme.name}
        </h4>
        <div class="readme">
            <Readme file={$readme} />
        </div>
    {/if}
    {#await data.treeEntries}
        <LoadingSpinner />
    {:then result}
        <!-- File path does not exist -->
        {#if result === null}
            <div class="error-wrapper">
                <div class="circle">
                    <Icon svgPath={mdiMapSearch} size={80} />
                </div>
                <h2>Directory not found</h2>
            </div>
        {:else if result.entries.length === 0}
            <Alert variant="info">This directory is empty.</Alert>
        {:else}
            {#if $treeEntriesWithCommitInfo}
                {#if $treeEntriesWithCommitInfo.error}
                    <Alert variant="danger">
                        Unable to load commit information: {$treeEntriesWithCommitInfo.error.message}
                    </Alert>
                {/if}
            {/if}
            <FileTable
                revision={data.revision ?? ''}
                entries={result.entries}
                commitInfo={$treeEntriesWithCommitInfo.value ?? []}
            />
        {/if}
    {:catch error}
        <Alert variant="danger">
            Unable to load directory information: {error.message}
        </Alert>
    {/await}
    {#await data.readme then readme}
        {#if readme}
            <h4 class="header">
                <Icon svgPath={mdiFileDocumentOutline} />

                {readme.name}
            </h4>
            <div class="readme">
                <Readme file={readme} />
            </div>
        {/if}
    {:catch error}
        <Alert variant="danger">
            Unable to load README: {error.message}
        </Alert>
    {/await}
</div>

<style lang="scss">
@ -68,4 +101,17 @@
        padding: 1rem;
        flex: 1;
    }

    .error-wrapper {
        display: flex;
        flex-direction: column;
        align-items: center;
    }

    .circle {
        background-color: var(--color-bg-2);
        border-radius: 50%;
        padding: 1.5rem;
        margin: 1rem;
    }
</style>

@ -1,4 +1,4 @@
import { getGraphQLClient } from '$lib/graphql'
import { getGraphQLClient, mapOrThrow } from '$lib/graphql'
import { fetchTreeEntries } from '$lib/repo/api/tree'
import { findReadme } from '$lib/repo/tree'
import { resolveRevision } from '$lib/repo/utils'
@ -7,37 +7,45 @@ import { parseRepoRevision } from '$lib/shared'
import type { PageLoad } from './$types'
import { TreePageCommitInfoQuery, TreePageReadmeQuery } from './page.gql'

export const load: PageLoad = async ({ parent, params }) => {
    const client = await getGraphQLClient()
export const load: PageLoad = ({ parent, params }) => {
    const client = getGraphQLClient()
    const { repoName, revision = '' } = parseRepoRevision(params.repo)
    const resolvedRevision = await resolveRevision(parent, revision)
    const resolvedRevision = resolveRevision(parent, revision)

    const treeEntries = fetchTreeEntries({
        repoName,
        revision: resolvedRevision,
        filePath: params.path,
        first: null,
    }).then(
        commit => commit.tree,
        () => null
    )
    const treeEntries = resolvedRevision
        .then(resolvedRevision =>
            fetchTreeEntries({
                repoName,
                revision: resolvedRevision,
                filePath: params.path,
                first: null,
            })
        )
        .then(commit => commit.tree)

    return {
        filePath: params.path,
        treeEntries,
        commitInfo: client
            .query({
                query: TreePageCommitInfoQuery,
                variables: {
        treeEntriesWithCommitInfo: resolvedRevision
            .then(resolvedRevision =>
                client.query(TreePageCommitInfoQuery, {
                    repoName,
                    revision: resolvedRevision,
                    filePath: params.path,
                    first: null,
                },
            })
            .then(result => {
                return result.data.repository?.commit?.tree ?? null
            }),
                })
            )
            .then(
                mapOrThrow(result => {
                    if (!result.data?.repository) {
                        throw new Error('Unable to fetch repository information')
                    }
                    if (!result.data.repository.commit) {
                        throw new Error('Unable to fetch commit information')
                    }
                    return result.data.repository.commit.tree?.entries ?? []
                })
            ),
        readme: treeEntries.then(result => {
            if (!result) {
                return null
@ -46,18 +54,25 @@ export const load: PageLoad = async ({ parent, params }) => {
            if (!readme) {
                return null
            }
            return client
                .query({
                    query: TreePageReadmeQuery,
                    variables: {
            return resolvedRevision
                .then(resolvedRevision =>
                    client.query(TreePageReadmeQuery, {
                        repoName,
                        revision: resolvedRevision,
                        path: readme.path,
                    },
                })
                .then(result => {
                    return result.data.repository?.commit?.blob ?? null
                })
        })
                    })
                )
                .then(
                    mapOrThrow(result => {
                        if (!result.data?.repository) {
                            throw new Error('Unable to fetch repository information')
                        }
                        if (!result.data.repository.commit) {
                            throw new Error('Unable to fetch commit information')
                        }
                        return result.data.repository.commit.blob
                    })
                )
        }),
    }
}
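A note on the recurring pattern in these loaders: dropping `async`/`await` and returning plain promise chains is what lets the pages render immediately and resolve each piece of data independently, via `{#await}` blocks or promise stores, instead of blocking navigation. A simplified illustration of the pattern (not from this repo, endpoint names are made up):

import type { PageLoad } from './$types'

// Simplified illustration: kick off both requests immediately and return the
// promises; the page resolves each one independently with {#await}.
export const load: PageLoad = ({ fetch }) => {
    const slow = fetch('/api/slow').then(response => response.json())
    const fast = fetch('/api/fast').then(response => response.json())
    return { slow, fast }
}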
@ -0,0 +1,189 @@
import { expect, test } from '../../../../../../../testing/integration'

const repoName = 'github.com/sourcegraph/sourcegraph'
const url = `/${repoName}/-/tree/src`

test.beforeEach(({ sg }) => {
    sg.fixture([
        {
            __typename: 'Repository',
            id: '1',
            name: repoName,
            mirrorInfo: {
                cloned: true,
                cloneInProgress: false,
            },
        },
        {
            __typename: 'GitTree',
            name: 'src',
            path: 'src',
            canonicalURL: `/${repoName}/-/tree/src`,
            isDirectory: true,
            isRoot: false,
            entries: [
                {
                    canonicalURL: `/${repoName}/-/tree/src/notes`,
                },
                {
                    canonicalURL: `/${repoName}/-/blob/src/index.js`,
                },
                {
                    canonicalURL: `/${repoName}/-/blob/src/README.md`,
                },
            ],
        },
        {
            __typename: 'GitTree',
            name: 'notes',
            path: 'src/notes',
            canonicalURL: `/${repoName}/-/tree/src/notes`,
            isDirectory: true,
            isRoot: false,
        },
        {
            __typename: 'GitBlob',
            name: 'index.js',
            path: 'src/index.js',
            canonicalURL: `/${repoName}/-/blob/src/index.js`,
            isDirectory: false,
            languages: ['JavaScript'],
            richHTML: '',
            content: 'var hello = "world"',
        },
        {
            __typename: 'GitBlob',
            canonicalURL: `/${repoName}/-/blob/src/README.md`,
            name: 'README.md',
            path: 'src/README.md',
            isDirectory: false,
            richHTML: 'Example readme content',
        },
    ])

    sg.mockOperations({
        ResolveRepoRevision: () => ({
            repositoryRedirect: {
                id: '1',
            },
        }),
        TreeEntries: ({}) => ({
            repository: {
                commit: {
                    tree: {
                        canonicalURL: `/${repoName}/-/tree/src`,
                    },
                },
            },
        }),
        TreePageCommitInfoQuery: ({}) => ({
            repository: {
                commit: {
                    tree: {
                        canonicalURL: `/${repoName}/-/tree/src`,
                    },
                },
            },
        }),
        TreePageReadmeQuery: ({ path }) => ({
            repository: {
                commit: {
                    blob: {
                        canonicalURL: `/${repoName}/-/blob/${path}`,
                    },
                },
            },
        }),
    })
})

test('list files in a directory', async ({ page }) => {
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'src' })).toBeVisible()

    await expect(page.getByRole('cell', { name: 'notes' })).toBeVisible()
    await expect(page.getByRole('cell', { name: 'index.js' })).toBeVisible()
})

test('shows README if available', async ({ page, sg }) => {
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'README.md' })).toBeVisible()
    await expect(page.getByText('Example readme content')).toBeVisible()

    // Not available

    sg.mockOperations({
        TreeEntries: ({}) => ({
            repository: {
                commit: {
                    tree: {
                        canonicalURL: `/${repoName}/-/tree/src`,
                        entries: [
                            {
                                canonicalURL: `/${repoName}/-/blob/src/index.js`,
                            },
                        ],
                    },
                },
            },
        }),
    })
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'src' })).toBeVisible()
    await expect(page.getByRole('heading', { name: 'README.md' })).not.toBeVisible()
})

test('empty tree', async ({ page, sg }) => {
    sg.mockOperations({
        TreeEntries: ({}) => ({
            repository: {
                commit: {
                    tree: {
                        canonicalURL: `/${repoName}/-/tree/src`,
                        entries: [],
                    },
                },
            },
        }),
    })
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'src' })).toBeVisible()
    await expect(page.getByText('This directory is empty')).toBeVisible()
})

test('non-existent tree', async ({ page, sg }) => {
    sg.mockOperations({
        TreeEntries: ({}) => ({
            repository: {
                commit: {
                    tree: null,
                },
            },
        }),
    })
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'src' })).toBeVisible()
    await expect(page.getByText('Directory not found')).toBeVisible()
})

test('error loading tree data', async ({ page, sg }) => {
    sg.mockOperations({
        TreeEntries: ({}) => {
            throw new Error('Tree error')
        },
    })
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'src' })).toBeVisible()
    await expect(page.getByText(/Tree error/).first()).toBeVisible()
})

test('error loading commit data', async ({ page, sg }) => {
    sg.mockOperations({
        TreePageCommitInfoQuery: ({}) => {
            throw new Error('Commit info error')
        },
    })
    await page.goto(url)
    await expect(page.getByRole('heading', { name: 'src' })).toBeVisible()
    await expect(page.getByText(/Commit info error/)).toBeVisible()
})

@ -5,7 +5,7 @@
    import { onMount } from 'svelte'

    import { afterNavigate, goto } from '$app/navigation'
    import { getFileIconInfo } from '$lib/wildcard'
    import { getFileIconInfo, Alert } from '$lib/wildcard'
    import Icon from '$lib/Icon.svelte'
    import { type FileTreeProvider, NODE_LIMIT, type FileTreeNodeValue, type TreeEntryFields } from '$lib/repo/api/tree'
    import { getSidebarFileTreeStateForRepo } from '$lib/repo/stores'
@ -13,6 +13,7 @@
    import { createForwardStore } from '$lib/utils'
    import { replaceRevisionInURL } from '$lib/web'

    export let repoName: string
    export let treeProvider: FileTreeProvider
    export let selectedPath: string
    export let revision: string
@ -81,17 +82,15 @@
    }

    let treeView: TreeView<FileTreeNodeValue>
    let repoName = treeProvider.getRepoName()
    // Since context is only set once when the component is created
    // we need to dynamically sync any changes to the corresponding
    // file tree state store
    const treeState = createForwardStore(getSidebarFileTreeStateForRepo(treeProvider.getRepoName()))
    const treeState = createForwardStore(getSidebarFileTreeStateForRepo(repoName))
    // Propagating the tree state via context yielded better performance than passing
    // it via props.
    setTreeContext(treeState)

    $: treeRoot = treeProvider.getRoot()
    $: repoName = treeProvider.getRepoName()
    $: treeState.updateStore(getSidebarFileTreeStateForRepo(repoName))
    // Update open and selected nodes when the path changes.
    $: markSelected(selectedPath)
@ -137,14 +136,15 @@
            </a>
        {/if}
    </svelte:fragment>
    <Alert slot="error" let:error variant="danger">
        Unable to fetch file tree data: {error.message}
    </Alert>
</TreeView>
</div>

<style lang="scss">
    div {
        overflow: auto;
        // Don't scroll file/folder page when scrolling to the top or bottom of the file tree
        overscroll-behavior-y: contain;

        :global(.treeitem.selectable) > :global(.label) {
            cursor: pointer;
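The `createForwardStore` helper used above is only visible through its call sites (`createForwardStore(initialStore)` plus `treeState.updateStore(...)`). A hedged sketch of what such a forwarding store could look like; the real helper in `$lib/utils` may be implemented differently:

import { writable, type Readable, type Unsubscriber } from 'svelte/store'

// Hypothetical sketch: a readable store that mirrors an inner store which can
// be swapped at runtime, so the context value handed out once stays stable
// while the backing per-repository state changes.
export function createForwardStore<T>(initial: Readable<T>): Readable<T> & { updateStore(store: Readable<T>): void } {
    let current = initial
    let active = false
    let stop: Unsubscriber | undefined
    const { subscribe, set } = writable<T>(undefined as T, () => {
        active = true
        stop = current.subscribe(set)
        return () => {
            active = false
            stop?.()
        }
    })

    return {
        subscribe,
        updateStore(store) {
            if (store === current) {
                return
            }
            current = store
            if (active) {
                // Forward values from the newly selected inner store.
                stop?.()
                stop = current.subscribe(set)
            }
        },
    }
}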
@ -0,0 +1,141 @@
import { basename } from 'path'

import { test, expect, vi, beforeEach, afterEach } from 'vitest'

import { createFileTreeStore, clearTopTreePathCache_testingOnly } from './fileTreeStore'

vi.mock('$app/environment', () => ({ browser: true }))

let store: ReturnType<typeof createFileTreeStore>
let fetchFileTreeData: Parameters<typeof createFileTreeStore>[0]['fetchFileTreeData'] = async ({ filePath }) => ({
    // The exact values don't matter for this test
    root: {
        name: basename(filePath),
        path: filePath,
        isRoot: filePath === '',
        entries: [],
        isDirectory: true,
        canonicalURL: '',
    },
    values: [],
})

beforeEach(async () => {
    vi.useFakeTimers()
    store = createFileTreeStore({
        fetchFileTreeData,
    })
})

afterEach(async () => {
    vi.useRealTimers()
    clearTopTreePathCache_testingOnly()
})

test('caches top-level tree paths in the browser', async () => {
    const subscriber = vi.fn()
    store.subscribe(subscriber)

    // Start with a specific file tree
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b/c/d' })
    await vi.runAllTimersAsync()

    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b/c/d' })

    // Navigate up the tree -> a new provider should be created
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b' })
    await vi.runAllTimersAsync()

    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b' })

    // Navigate down the tree -> the previous provider should be reused
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b/c' })
    await vi.runAllTimersAsync()

    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b' })
})

test('per repository caching', async () => {
    const subscriber = vi.fn()
    store.subscribe(subscriber)

    // Start with a specific file tree
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b/c' })
    await vi.runAllTimersAsync()

    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b/c' })

    // Navigate to different repo -> a new provider should be created
    store.set({ repoName: 'repo2', revision: 'rev', path: 'a/b/c/d/e' })
    await vi.runAllTimersAsync()

    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b/c/d/e' })

    // Navigate back to the first repo -> the previous provider should be reused
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b/c/d' })
    await vi.runAllTimersAsync()

    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b/c' })
})

test('per revision caching', async () => {
    const subscriber = vi.fn()
    store.subscribe(subscriber)

    // Start with a specific file tree
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b/c' })
    await vi.runAllTimersAsync()

    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b/c' })

    // Navigate to different revision -> a new provider should be created
    store.set({ repoName: 'repo', revision: 'rev1', path: 'a/b/c/d/e' })
    await vi.runAllTimersAsync()

    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b/c/d/e' })

    // Navigate back to the previous revision -> the previous provider should be reused
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b/c/d' })
    await vi.runAllTimersAsync()

    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b/c' })
})

test('error recovery', async () => {
    function getStore(): ReturnType<typeof createFileTreeStore> {
        return createFileTreeStore({
            fetchFileTreeData: vi.fn(fetchFileTreeData).mockRejectedValueOnce(new Error('Error fetching file tree')),
        })
    }
    let store = getStore()

    const subscriber = vi.fn()
    store.subscribe(subscriber)

    // Start with a specific file tree
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b/c' })
    await vi.runAllTimersAsync()
    expect(subscriber.mock.lastCall?.[0]).toBeInstanceOf(Error)

    // Navigate up the tree
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b' })
    await vi.runAllTimersAsync()
    expect(subscriber.mock.lastCall?.[0]).not.toBeInstanceOf(Error)
    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/b' })

    // Reset to test error recovery when switching to a different path
    store = getStore()
    subscriber.mockClear()
    store.subscribe(subscriber)

    // Start with a specific file tree
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/b/c' })
    await vi.runAllTimersAsync()
    expect(subscriber.mock.lastCall?.[0]).toBeInstanceOf(Error)

    // Navigate to other path
    store.set({ repoName: 'repo', revision: 'rev', path: 'a/x/y' })
    await vi.runAllTimersAsync()
    expect(subscriber.mock.lastCall?.[0]).not.toBeInstanceOf(Error)
    expect(subscriber.mock.lastCall?.[0]?.getRoot()).toMatchObject({ path: 'a/x/y' })
})

@ -0,0 +1,103 @@
import { from, of, Subject } from 'rxjs'
import { catchError, distinctUntilChanged, map, switchMap } from 'rxjs/operators'
import { readable, type Readable } from 'svelte/store'

import { browser } from '$app/environment'
import { FileTreeProvider, type FileTreeData, type FileTreeLoader } from '$lib/repo/api/tree'

/**
 * Keeps track of the top-level directory that has been visited for each repository and revision.
 */
const topTreePathByRepoAndRevision = new Map<string, Map<string, string>>()

/**
 * Clears the cache of top-level directories that have been visited.
 * This should only be used in tests.
 */
export function clearTopTreePathCache_testingOnly(): void {
    topTreePathByRepoAndRevision.clear()
}

/**
 * Manages the state of the sidebar file tree.
 *
 * @remarks
 * This store ensures that we always show the most top-level directory that has been visited so far.
 */
interface FileTreeStore extends Readable<FileTreeProvider | Error | null> {
    /**
     * Sets the current repo, revision, and path for the file tree.
     */
    set(args: { repoName: string; revision: string; path: string }): void
}

interface FileTreeStoreOptions {
    /**
     * Fetches the file tree for the given repo, revision, and path.
     */
    fetchFileTreeData: (args: { repoName: string; revision: string; filePath: string }) => Promise<FileTreeData>
}

/**
 * Helper function for managing the sidebar file tree state.
 * Specifically it ensures that we always show the most top-level directory
 * that has been visited so far.
 */
export function createFileTreeStore(options: FileTreeStoreOptions): FileTreeStore {
    const repoRevPath = new Subject<{ repoName: string; revision: string; path: string }>()
    const { subscribe } = readable<FileTreeProvider | Error | null>(null, set => {
        const subscription = repoRevPath
            .pipe(
                // We need to create a new file tree provider in the following cases:
                // - The repo changes
                // - The revision changes
                // - The path is not a subdirectory of the top path
                distinctUntilChanged(
                    ({ repoName, revision }, { repoName: nextRepoName, revision: nextRevision, path: nextPath }) => {
                        if (browser && repoName === nextRepoName && revision === nextRevision) {
                            const topPath = topTreePathByRepoAndRevision.get(repoName)?.get(revision)
                            return topPath ? topPath === '.' || nextPath.startsWith(topPath) : false
                        }
                        return false
                    }
                ),
                // If the new path is a subdirectory of the cached top path we keep using the
                // top path; otherwise the new path becomes the new top path.
                map(({ repoName, revision, path }) => {
                    if (browser) {
                        const topPath = topTreePathByRepoAndRevision.get(repoName)?.get(revision)
                        if (topPath && (topPath === '.' || path.startsWith(topPath))) {
                            return { repoName, revision, path: topPath }
                        } else {
                            // The new path is the new top path
                            const topPaths = topTreePathByRepoAndRevision.get(repoName) || new Map()
                            topPaths.set(revision, path)
                            topTreePathByRepoAndRevision.set(repoName, topPaths)
                        }
                    }
                    return { repoName, revision, path }
                }),
                // Fetch the file tree for the given repo, revision, and path
                switchMap(({ repoName, revision, path }) => {
                    const loader: FileTreeLoader = args =>
                        options.fetchFileTreeData({ repoName, revision, filePath: args.filePath })
                    return from(loader({ filePath: path })).pipe(
                        map(data => new FileTreeProvider({ ...data, loader })),
                        // If an observable errors the subscription is closed, so we need to catch
                        // the error here to ensure that the (outer) subscription stays open
                        catchError(error => of(error))
                    )
                })
            )
            .subscribe(set)
        return () => {
            subscription.unsubscribe()
        }
    })

    return {
        subscribe,
        set(args) {
            repoRevPath.next(args)
        },
    }
}
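Putting the pieces together, consumers drive the store with route data and branch on its three possible values, mirroring the repo layout component earlier in this diff. A simplified usage sketch (the repository name is the one used in the tests above; the revision and path are placeholders):

import { isErrorLike } from '$lib/common'
import { fetchSidebarFileTree } from '$lib/repo/api/tree'

import { createFileTreeStore } from './fileTreeStore'

const fileTreeStore = createFileTreeStore({ fetchFileTreeData: fetchSidebarFileTree })

// Push the current location into the store; it decides whether the existing
// provider can be reused or a new one has to be created.
fileTreeStore.set({ repoName: 'github.com/sourcegraph/sourcegraph', revision: '<commit>', path: 'client/web-sveltekit' })

fileTreeStore.subscribe(value => {
    if (value === null) {
        // Still loading
    } else if (isErrorLike(value)) {
        // Surface the error, e.g. in an <Alert variant="danger">
    } else {
        // value is a FileTreeProvider ready to render
    }
})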
@ -4,12 +4,6 @@ query GitHistoryQuery($repoName: String!, $revspec: String!, $first: Int, $after
|
||||
commit(rev: $revspec) {
|
||||
id
|
||||
ancestors(first: $first, path: $filePath, afterCursor: $afterCursor) {
|
||||
# This is a bit hacky, but by fetching all the data needed by both
|
||||
# the history panel and the commits page we ensure that our custom
|
||||
# Apollo infinitiy scroll cache handling for this field works
|
||||
# correctly. Eventually we should revsisit the use of infinity scroll
|
||||
# on the commits page.
|
||||
...CommitsPage_GitCommitConnection
|
||||
...GitHistory_HistoryConnection
|
||||
}
|
||||
}
|
||||
|
||||
@ -2,16 +2,14 @@ fragment RepoPage_ResolvedRevision on Repository {
|
||||
description
|
||||
}
|
||||
|
||||
query RepoPageReadmeQuery($repoID: ID!, $revspec: String!, $path: String!) {
|
||||
node(id: $repoID) {
|
||||
... on Repository {
|
||||
query RepoPageReadmeQuery($repoName: String!, $revspec: String!, $path: String!) {
|
||||
repository(name: $repoName) {
|
||||
id
|
||||
commit(rev: $revspec) {
|
||||
id
|
||||
commit(rev: $revspec) {
|
||||
id
|
||||
blob(path: $path) {
|
||||
canonicalURL # key field
|
||||
...RepoPage_Readme
|
||||
}
|
||||
blob(path: $path) {
|
||||
canonicalURL # key field
|
||||
...RepoPage_Readme
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -25,6 +25,9 @@ test.beforeEach(({ sg }) => {
|
||||
canonicalURL: `/${repoName}/-/tree/`,
|
||||
isDirectory: true,
|
||||
entries: [
|
||||
{
|
||||
canonicalURL: `/${repoName}/-/tree/src`,
|
||||
},
|
||||
{
|
||||
canonicalURL: `/${repoName}/-/blob/index.js`,
|
||||
},
|
||||
@ -34,6 +37,19 @@ test.beforeEach(({ sg }) => {
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
__typename: 'GitTree',
|
||||
path: 'src',
|
||||
name: 'src',
|
||||
canonicalURL: `/${repoName}/-/tree/src`,
|
||||
isDirectory: true,
|
||||
isRoot: false,
|
||||
entries: [
|
||||
{
|
||||
canonicalURL: `/${repoName}/-/blob/src/notes.txt`,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
__typename: 'GitBlob',
|
||||
path: 'index.js',
|
||||
@ -52,10 +68,19 @@ test.beforeEach(({ sg }) => {
|
||||
isDirectory: false,
|
||||
richHTML: 'Example readme content',
|
||||
},
|
||||
{
|
||||
__typename: 'GitBlob',
|
||||
canonicalURL: `/${repoName}/-/blob/src/notes.txt`,
|
||||
name: 'notes.txt',
|
||||
path: 'src/notes.txt',
|
||||
isDirectory: false,
|
||||
content: 'Some notes',
|
||||
richHTML: '',
|
||||
},
|
||||
])
|
||||
|
||||
sg.mockOperations({
|
||||
ResolveRepoRevison: () => ({
|
||||
ResolveRepoRevision: () => ({
|
||||
repositoryRedirect: {
|
||||
id: '1',
|
||||
},
|
||||
@ -65,9 +90,9 @@ test.beforeEach(({ sg }) => {
|
||||
id: '1',
|
||||
},
|
||||
}),
|
||||
RepoPageReadmeQuery: ({ repoID, path }) => ({
|
||||
node: {
|
||||
id: repoID,
|
||||
RepoPageReadmeQuery: ({ path }) => ({
|
||||
repository: {
|
||||
id: '1',
|
||||
commit: {
|
||||
blob: {
|
||||
canonicalURL: `/${repoName}/-/blob/${path}`,
|
||||
@ -78,30 +103,86 @@ test.beforeEach(({ sg }) => {
|
||||
})
|
||||
})
|
||||
test('file sidebar', async ({ page }) => {
    const readmeEntry = page.getByRole('treeitem', { name: 'README.md' })
test.describe('file sidebar', () => {
    test('basic functionality', async ({ page }) => {
        const readmeEntry = page.getByRole('treeitem', { name: 'README.md' })

    await page.goto(`/${repoName}`)
    await expect(readmeEntry).toBeVisible()
        await page.goto(`/${repoName}`)
        await expect(readmeEntry).toBeVisible()

    // Close file sidebar
    await page.getByRole('button', { name: 'Hide sidebar' }).click()
    await expect(readmeEntry).toBeHidden()
        // Close file sidebar
        await page.getByRole('button', { name: 'Hide sidebar' }).click()
        await expect(readmeEntry).toBeHidden()

    // Open sidebar
    await page.getByRole('button', { name: 'Show sidebar' }).click()
        // Open sidebar
        await page.getByRole('button', { name: 'Show sidebar' }).click()

    // Go to a file
    await readmeEntry.click()
    await expect(page).toHaveURL(`/${repoName}/-/blob/README.md`)
    // Verify that entry is selected
    await expect(page.getByRole('treeitem', { name: 'README.md', selected: true })).toBeVisible()
        // Go to a file
        await readmeEntry.click()
        await expect(page).toHaveURL(`/${repoName}/-/blob/README.md`)
        // Verify that entry is selected
        await expect(page.getByRole('treeitem', { name: 'README.md', selected: true })).toBeVisible()

    // Go to another file
    await page.getByRole('treeitem', { name: 'index.js' }).click()
    await expect(page).toHaveURL(`/${repoName}/-/blob/index.js`)
    // Verify that entry is selected
    await expect(page.getByRole('treeitem', { name: 'index.js', selected: true })).toBeVisible()
        // Go to another file
        await page.getByRole('treeitem', { name: 'index.js' }).click()
        await expect(page).toHaveURL(`/${repoName}/-/blob/index.js`)
        // Verify that entry is selected
        await expect(page.getByRole('treeitem', { name: 'index.js', selected: true })).toBeVisible()
    })

    test('error handling root', async ({ page, sg }) => {
        sg.mockOperations({
            TreeEntries: () => {
                throw new Error('Sidebar error')
            },
        })

        await page.goto(`/${repoName}`)
        await expect(page.getByText(/Sidebar error/)).toBeVisible()
    })

    test('error handling children', async ({ page, sg }) => {
        await page.goto(`/${repoName}`)

        const treeItem = page.getByRole('treeitem', { name: 'src' })
        // For some reason we need to wait for the tree to be rendered
        // before we mock the GraphQL response to throw an error
        await expect(treeItem).toBeVisible()

        sg.mockOperations({
            TreeEntries: () => {
                throw new Error('Child error')
            },
        })
        // Clicks the toggle button next to the tree entry, to expand the tree
        // and _not_ follow the link
        await treeItem.getByRole('button').click()
        await expect(page.getByText(/Child error/)).toBeVisible()
    })

    test('error handling non-existing directory -> root', async ({ page, sg }) => {
        // Here we expect the sidebar to show an error message, and after navigating
        // to an existing directory, the directory contents
        sg.mockOperations({
            TreeEntries: () => {
                throw new Error('Sidebar error')
            },
        })

        await page.goto(`/${repoName}/-/tree/non-existing-directory`)
        await expect(page.getByText(/Sidebar error/).first()).toBeVisible()

        sg.mockOperations({
            TreeEntries: () => ({
                repository: {
                    id: '1',
                },
            }),
        })

        await page.goto(`/${repoName}`)
        await expect(page.getByRole('treeitem', { name: 'README.md' })).toBeVisible()
    })
})

test('repo readme', async ({ page }) => {

@@ -1,46 +1,47 @@
<script lang="ts">
    import LoadingSpinner from '$lib/LoadingSpinner.svelte'
    import GitReference from '$lib/repo/GitReference.svelte'
    import { createPromiseStore } from '$lib/utils'
    import type { GitBranchesOverview } from './page.gql'

    import type { PageData } from './$types'
    import { Alert } from '$lib/wildcard'

    export let data: PageData

    const { pending, value: branches, set } = createPromiseStore<GitBranchesOverview>()
    $: set(data.overview)
    $: defaultBranch = $branches?.defaultBranch
    $: activeBranches = $branches?.branches.nodes.filter(branch => branch.id !== defaultBranch?.id)
</script>

<svelte:head>
    <title>Branches - {data.displayRepoName} - Sourcegraph</title>
</svelte:head>

{#if $pending}
{#await data.overview}
    <LoadingSpinner />
{/if}
{:then result}
    {@const activeBranches = result.branches.nodes.filter(branch => branch.id !== result.defaultBranch?.id)}

{#if defaultBranch}
    <table class="mb-3">
        <thead><tr><th colspan="3">Default branch</th></tr></thead>
        <tbody>
            <GitReference ref={defaultBranch} />
        </tbody>
    </table>
{/if}
    {#if result.defaultBranch}
        <table class="mb-3">
            <thead><tr><th colspan="3">Default branch</th></tr></thead>
            <tbody>
                <GitReference ref={result.defaultBranch} />
            </tbody>
        </table>
    {/if}

{#if activeBranches && activeBranches.length > 0}
    <table>
        <thead><tr><th colspan="3">Active branches</th></tr></thead>
        <tbody>
            {#each activeBranches as branch (branch.id)}
                <GitReference ref={branch} />
            {/each}
        </tbody>
    </table>
{/if}
    {#if activeBranches.length > 0}
        <table>
            <thead><tr><th colspan="3">Active branches</th></tr></thead>
            <tbody>
                {#each activeBranches as branch (branch.id)}
                    <GitReference ref={branch} />
                {/each}
            </tbody>
        </table>
    {/if}
{:catch error}
    <Alert variant="danger">
        Unable to fetch branches:
        {error.message}
    </Alert>
{/await}

<style lang="scss">
    table {

@@ -1,32 +1,29 @@
import { getGraphQLClient } from '$lib/graphql'
import { getGraphQLClient, mapOrThrow } from '$lib/graphql'
import { parseRepoRevision } from '$lib/shared'

import type { PageLoad } from './$types'
import { BranchesPage_OverviewQuery } from './page.gql'

export const load: PageLoad = async ({ params }) => {
    const client = await getGraphQLClient()
export const load: PageLoad = ({ params }) => {
    const client = getGraphQLClient()
    const { repoName } = parseRepoRevision(params.repo)

    return {
        overview: client
            .query({
                query: BranchesPage_OverviewQuery,
                variables: {
                    first: 20,
                    repoName,
                    withBehindAhead: true,
                },
            .query(BranchesPage_OverviewQuery, {
                first: 20,
                repoName,
                withBehindAhead: true,
            })
            .then(result => {
                if (!result.data.repository) {
                    // This page will never render when the repository is not found.
                    // The (validrev) data loader will render an error page instead.
                    // Still, this error will show up as an unhandled promise rejection
                    // in the console. We should find a better way to handle this.
                    throw new Error('Expected Repository')
                }
                return result.data.repository
            }),
            .then(
                mapOrThrow(result => {
                    if (!result.data?.repository) {
                        // This page will never render when the repository is not found.
                        // The (validrev) data loader will render an error page instead.
                        throw new Error('Unable to load repository data.')
                    }
                    return result.data.repository
                })
            ),
    }
}

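Editor's note: the new `mapOrThrow` helper is imported from `$lib/graphql` but its definition is not part of this diff. A minimal sketch of what such a helper could look like, assuming it only promotes `result.error` on a urql `OperationResult` to a thrown error before applying the mapping function (the actual implementation in `$lib/graphql` may differ):

import type { AnyVariables, OperationResult } from '@urql/core'

// Sketch only: turns a failed GraphQL result into a rejected promise so
// that load functions no longer silently ignore errors.
export function mapOrThrow<Data, Variables extends AnyVariables, Result>(
    map: (result: OperationResult<Data, Variables>) => Result
): (result: OperationResult<Data, Variables>) => Result {
    return result => {
        // Surface transport and GraphQL errors to the caller's catch/await.
        if (result.error) {
            throw result.error
        }
        return map(result)
    }
}

With this shape, `client.query(...).then(mapOrThrow(result => ...))` rejects on any error, which is what the error alerts in the pages above rely on.
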
@@ -1,39 +1,36 @@
<script lang="ts">
    import LoadingSpinner from '$lib/LoadingSpinner.svelte'
    import GitReference from '$lib/repo/GitReference.svelte'
    import { createPromiseStore } from '$lib/utils'
    import type { GitBranchesConnection } from './page.gql'

    import type { PageData } from './$types'
    import { Alert } from '$lib/wildcard'

    export let data: PageData

    const { pending, value: connection, set } = createPromiseStore<GitBranchesConnection>()
    $: set(data.branches)
    $: nodes = $connection?.nodes
    $: totalCount = $connection?.totalCount
</script>

<svelte:head>
    <title>All branches - {data.displayRepoName} - Sourcegraph</title>
</svelte:head>

{#if $pending}
{#await data.branches}
    <LoadingSpinner />
{:else if nodes}
{:then branches}
    <!-- TODO: Search input to filter branches by name -->
    <!-- TODO: Pagination -->
    <table>
        <tbody>
            {#each nodes as node (node.id)}
            {#each branches.nodes as node (node.id)}
                <GitReference ref={node} />
            {/each}
        </tbody>
    </table>
    {#if totalCount !== null}
        <small class="text-muted">{totalCount} branches total</small>
    {/if}
{/if}
    <small class="text-muted">{branches.totalCount} branches total</small>
{:catch error}
    <Alert variant="danger">
        Unable to fetch branches information:
        {error.message}
    </Alert>
{/await}

<style lang="scss">
    table {

@@ -1,32 +1,29 @@
import { getGraphQLClient } from '$lib/graphql'
import { getGraphQLClient, mapOrThrow } from '$lib/graphql'
import { parseRepoRevision } from '$lib/shared'

import type { PageLoad } from './$types'
import { AllBranchesPage_BranchesQuery } from './page.gql'

export const load: PageLoad = async ({ params }) => {
    const client = await getGraphQLClient()
export const load: PageLoad = ({ params }) => {
    const client = getGraphQLClient()
    const { repoName } = parseRepoRevision(params.repo)

    return {
        branches: client
            .query({
                query: AllBranchesPage_BranchesQuery,
                variables: {
                    repoName,
                    first: 20,
                    withBehindAhead: true,
                },
            .query(AllBranchesPage_BranchesQuery, {
                repoName,
                first: 20,
                withBehindAhead: true,
            })
            .then(result => {
                if (!result.data.repository) {
                    // This page will never render when the repository is not found.
                    // The (validrev) data loader will render an error page instead.
                    // Still, this error will show up as an unhandled promise rejection
                    // in the console. We should find a better way to handle this.
                    throw new Error('Expected Repository')
                }
                return result.data.repository.branches
            }),
            .then(
                mapOrThrow(result => {
                    if (!result.data?.repository) {
                        // This page will never render when the repository is not found.
                        // The (validrev) data loader will render an error page instead.
                        throw new Error('Expected Repository')
                    }
                    return result.data.repository.branches
                })
            ),
    }
}

@@ -0,0 +1,42 @@
import { expect, test } from '../../../../../../testing/integration'

const repoName = 'github.com/sourcegraph/sourcegraph'
const url = `/${repoName}/-/branches/all`

test.beforeEach(async ({ sg }) => {
    sg.mockOperations({
        ResolveRepoRevision: () => ({
            repositoryRedirect: {
                __typename: 'Repository',
                mirrorInfo: {
                    cloned: true,
                    cloneInProgress: false,
                },
            },
        }),
        AllBranchesPage_BranchesQuery: () => ({
            repository: {
                branches: {
                    nodes: [{ displayName: 'main' }, { displayName: 'feature/branch' }],
                },
            },
        }),
    })
})

test('list branches', async ({ page }) => {
    await page.goto(url)

    await expect(page.getByRole('link', { name: 'main' })).toBeVisible()
    await expect(page.getByRole('link', { name: 'feature/branch' })).toBeVisible()
})

test('error loading branches', async ({ page, sg }) => {
    sg.mockOperations({
        AllBranchesPage_BranchesQuery: () => {
            throw new Error('Test error')
        },
    })
    await page.goto(url)
    await expect(page.getByText(/Test error/)).toBeVisible()
})
@@ -0,0 +1,46 @@
import { expect, test } from '../../../../../testing/integration'

const repoName = 'github.com/sourcegraph/sourcegraph'
const url = `/${repoName}/-/branches`

test.beforeEach(async ({ sg }) => {
    sg.mockOperations({
        ResolveRepoRevision: () => ({
            repositoryRedirect: {
                __typename: 'Repository',
                mirrorInfo: {
                    cloned: true,
                    cloneInProgress: false,
                },
            },
        }),
        BranchesPage_OverviewQuery: () => ({
            repository: {
                defaultBranch: {
                    id: '1',
                    displayName: 'main',
                },
                branches: {
                    nodes: [{ displayName: 'main', id: '1' }, { displayName: 'feature/branch' }],
                },
            },
        }),
    })
})

test('list branches', async ({ page }) => {
    await page.goto(url)

    await expect(page.getByRole('link', { name: 'main' })).toBeVisible()
    await expect(page.getByRole('link', { name: 'feature/branch' })).toBeVisible()
})

test('error loading branches', async ({ page, sg }) => {
    sg.mockOperations({
        BranchesPage_OverviewQuery: () => {
            throw new Error('Test error')
        },
    })
    await page.goto(url)
    await expect(page.getByText(/Test error/)).toBeVisible()
})
@@ -5,39 +5,46 @@
    import type { PageData, Snapshot } from './$types'
    import FileDiff from '$lib/repo/FileDiff.svelte'
    import Scroller, { type Capture as ScrollerCapture } from '$lib/Scroller.svelte'
    import { get } from 'svelte/store'
    import { navigating } from '$app/stores'
    import type { CommitPage_DiffConnection } from './page.gql'
    import { Alert } from '$lib/wildcard'

    interface Capture {
        scroll: ScrollerCapture
        diffCount: number
        expandedDiffs: Array<[number, boolean]>
    }

    export let data: PageData

    export const snapshot: Snapshot<{ scroll: ScrollerCapture; expandedDiffs: Array<[number, boolean]> }> = {
    export const snapshot: Snapshot<Capture> = {
        capture: () => ({
            scroll: scroller.capture(),
            diffCount: diffs?.nodes.length ?? 0,
            expandedDiffs: Array.from(expandedDiffs.entries()),
        }),
        restore: capture => {
            scroller.restore(capture.scroll)
        restore: async capture => {
            expandedDiffs = new Map(capture.expandedDiffs)
            if (capture?.diffCount !== undefined && get(navigating)?.type === 'popstate') {
                await data.diff?.restore(result => {
                    const count = result.data?.repository?.comparison.fileDiffs.nodes.length
                    return !!count && count < capture.diffCount
                })
            }
            scroller.restore(capture.scroll)
        },
    }

    const diff = data.diff
    let scroller: Scroller
    let loading = true
    let expandedDiffs = new Map<number, boolean>()
    let diffs: CommitPage_DiffConnection | null = null

    $: fileDiffConnection = $diff?.data.repository?.comparison.fileDiffs ?? null
    $: if ($diff?.data.repository) {
        loading = false
    }

    function fetchMore() {
        if (fileDiffConnection?.pageInfo.hasNextPage) {
            loading = true
            diff?.fetchMore({
                variables: {
                    after: fileDiffConnection.pageInfo.endCursor,
                },
            })
        }
    $: diffQuery = data.diff
    // We conditionally check for the comparison field to be able to show
    // previously loaded file diffs when an error occurs while fetching more.
    $: if ($diffQuery?.data?.repository) {
        diffs = $diffQuery.data.repository.comparison.fileDiffs
    }
</script>

@@ -47,7 +54,7 @@

<section>
    {#if data.commit}
        <Scroller bind:this={scroller} margin={600} on:more={fetchMore}>
        <Scroller bind:this={scroller} margin={600} on:more={data.diff?.fetchMore}>
            <div class="header">
                <div class="info"><Commit commit={data.commit} alwaysExpanded /></div>
                <div>
@@ -60,9 +67,9 @@
                    </span>
                </div>
            </div>
            {#if fileDiffConnection}
            {#if !$diffQuery?.restoring && diffs}
                <ul>
                    {#each fileDiffConnection.nodes as node, index}
                    {#each diffs.nodes as node, index}
                        <li>
                            <FileDiff
                                fileDiff={node}
@@ -73,8 +80,14 @@
                    {/each}
                </ul>
            {/if}
            {#if loading}
            {#if $diffQuery?.fetching || $diffQuery?.restoring}
                <LoadingSpinner />
            {:else if $diffQuery?.error}
                <div class="m-4">
                    <Alert variant="danger">
                        Unable to fetch file diffs: {$diffQuery.error.message}
                    </Alert>
                </div>
            {/if}
        </Scroller>
    {/if}
@@ -82,12 +95,13 @@

<style lang="scss">
    section {
        padding: 1rem;
        overflow: auto;
    }

    .header {
        display: flex;
        padding: 1rem;
        border-bottom: 1px solid var(--border-color);
    }

    .parents {
@@ -99,6 +113,7 @@

    ul {
        list-style: none;
        padding: 1rem;

        li {
            margin-bottom: 1rem;

@@ -1,4 +1,6 @@
import { getGraphQLClient } from '$lib/graphql'
import { error } from '@sveltejs/kit'

import { getGraphQLClient, infinityQuery } from '$lib/graphql'
import { parseRepoRevision } from '$lib/shared'

import type { PageLoad } from './$types'
@@ -7,35 +9,68 @@ import { CommitPage_CommitQuery, CommitPage_DiffQuery } from './page.gql'
const PAGE_SIZE = 20

export const load: PageLoad = async ({ params }) => {
    const client = await getGraphQLClient()
    const client = getGraphQLClient()
    const { repoName } = parseRepoRevision(params.repo)

    const commit = await client
        .query({ query: CommitPage_CommitQuery, variables: { repoName, revspec: params.revspec } })
        .then(result => {
            return result.data.repository?.commit ?? null
        })
    const result = await client.query(CommitPage_CommitQuery, { repoName, revspec: params.revspec })

    if (result.error) {
        error(500, `Unable to load commit data: ${result.error}`)
    }

    const commit = result.data?.repository?.commit

    if (!commit) {
        error(404, 'Commit not found')
    }

    // parents is an empty array for the initial commit
    // We currently don't support diffs for the initial commit on the backend
    const diff =
        commit?.oid && commit?.parents[0]?.oid
            ? client.watchQuery({
            ? infinityQuery({
                  client,
                  query: CommitPage_DiffQuery,
                  variables: {
                      repoName,
                      base: commit.parents[0].oid,
                      head: commit.oid,
                      first: PAGE_SIZE,
                      after: null,
                      after: null as string | null,
                  },
                  nextVariables: previousResult => {
                      if (previousResult?.data?.repository?.comparison?.fileDiffs?.pageInfo?.hasNextPage) {
                          return {
                              after: previousResult.data.repository.comparison.fileDiffs.pageInfo.endCursor,
                          }
                      }
                      return undefined
                  },
                  combine: (previousResult, nextResult) => {
                      if (!nextResult.data?.repository?.comparison) {
                          return nextResult
                      }
                      const previousNodes = previousResult.data?.repository?.comparison?.fileDiffs?.nodes ?? []
                      const nextNodes = nextResult.data.repository?.comparison?.fileDiffs?.nodes ?? []
                      return {
                          ...nextResult,
                          data: {
                              repository: {
                                  ...nextResult.data.repository,
                                  comparison: {
                                      ...nextResult.data.repository.comparison,
                                      fileDiffs: {
                                          ...nextResult.data.repository.comparison.fileDiffs,
                                          nodes: [...previousNodes, ...nextNodes],
                                      },
                                  },
                              },
                          },
                      }
                  },
              })
            : null

    if (diff && !client.readQuery({ query: CommitPage_DiffQuery, variables: diff.variables })) {
        // Eagerly fetch data if it isn't in the cache already. This ensures that the data is fetched
        // as soon as possible, not only after the layout subscribes to the query.
        diff.refetch()
    }

    return {
        commit,
        diff,

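Editor's note: `infinityQuery` is likewise imported from `$lib/graphql` without its definition appearing in this diff. Judging from the call sites here and on the commits page (the `nextVariables`/`combine` options and the `fetchMore`/`restore`/`fetching`/`restoring` members the templates use), a plausible shape is the following TypeScript sketch. All names are inferred, not the actual implementation, and the commits page additionally passes a stream of variables, which this sketch omits:

import type { Readable } from 'svelte/store'
import type { AnyVariables, Client, OperationResult, TypedDocumentNode } from '@urql/core'

// Inferred from usage in this diff; hypothetical, not the real definition.
export interface InfinityQueryOptions<Data, Variables extends AnyVariables> {
    client: Client
    query: TypedDocumentNode<Data, Variables>
    variables: Variables
    // Variables for the next page, or undefined when there is nothing left to fetch.
    nextVariables(previous: OperationResult<Data, Variables> | undefined): Partial<Variables> | undefined
    // Consolidates the accumulated result with the next page's result.
    combine(
        previous: OperationResult<Data, Variables>,
        next: OperationResult<Data, Variables>
    ): OperationResult<Data, Variables>
}

export interface InfinityQueryState<Data> {
    data?: Data
    error?: Error
    fetching: boolean
    // True while previously loaded pages are re-fetched to restore scroll position.
    restoring: boolean
}

export interface InfinityQueryStore<Data, Variables extends AnyVariables>
    extends Readable<InfinityQueryState<Data>> {
    fetchMore(): void
    // Keeps fetching pages while the predicate reports that the restored
    // result is still smaller than what was captured in the snapshot.
    restore(shouldFetchMore: (result: OperationResult<Data, Variables>) => boolean): Promise<void>
}
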
@@ -21,14 +21,18 @@ query CommitPage_DiffQuery($repoName: String!, $base: String, $head: String, $fi
        id
        comparison(base: $base, head: $head) {
            fileDiffs(first: $first, after: $after) {
                nodes {
                    ...FileDiff_Diff
                }
                pageInfo {
                    endCursor
                    hasNextPage
                }
                ...CommitPage_DiffConnection
            }
        }
    }
}

fragment CommitPage_DiffConnection on FileDiffConnection {
    nodes {
        ...FileDiff_Diff
    }
    pageInfo {
        endCursor
        hasNextPage
    }
}

@@ -0,0 +1,58 @@
import { expect, test } from '../../../../../../testing/integration'

const repoName = 'github.com/sourcegraph/sourcegraph'
const url = `/${repoName}/-/commit/1234567890abcdef`

test.beforeEach(async ({ sg }) => {
    sg.mockOperations({
        ResolveRepoRevision: () => ({
            repositoryRedirect: {
                __typename: 'Repository',
                mirrorInfo: {
                    cloned: true,
                    cloneInProgress: false,
                },
            },
        }),
    })
})

test('commit not found', async ({ page, sg }) => {
    sg.mockOperations({
        ResolveRepoRevision: () => ({
            repositoryRedirect: {
                mirrorInfo: {
                    cloned: true,
                    cloneInProgress: false,
                },
            },
        }),
        CommitPage_CommitQuery: () => ({
            repository: {
                commit: null,
            },
        }),
    })
    await page.goto(url)
    await expect(page.getByText(/Commit not found/)).toBeVisible()
})

test('error loading commit information', async ({ page, sg }) => {
    sg.mockOperations({
        CommitPage_CommitQuery: () => {
            throw new Error('Test error')
        },
    })
    await page.goto(url)
    await expect(page.getByText(/Test error/)).toBeVisible()
})

test('error loading diff information', async ({ page, sg }) => {
    sg.mockOperations({
        CommitPage_DiffQuery: () => {
            throw new Error('Test error')
        },
    })
    await page.goto(url)
    await expect(page.getByText(/Test error/)).toBeVisible()
})
@@ -4,6 +4,9 @@
    import type { PageData, Snapshot } from './$types'
    import LoadingSpinner from '$lib/LoadingSpinner.svelte'
    import Scroller, { type Capture as ScrollerCapture } from '$lib/Scroller.svelte'
    import { get } from 'svelte/store'
    import { navigating } from '$app/stores'
    import { Alert } from '$lib/wildcard'
    import type { CommitsPage_GitCommitConnection } from './page.gql'

    export let data: PageData
@@ -18,77 +21,29 @@
                scroller: scroller.capture(),
            }
        },
        restore(snapshot) {
            restoredCommitCount = snapshot.commitCount
            restoredScroller = snapshot.scroller
        async restore(snapshot) {
            if (snapshot?.commitCount !== undefined && get(navigating)?.type === 'popstate') {
                await commitsQuery?.restore(result => {
                    const count = result.data?.repository?.commit?.ancestors.nodes?.length
                    return !!count && count < snapshot.commitCount
                })
            }
            scroller.restore(snapshot.scroller)
        },
    }

    /**
     * Fetches more commits when the user scrolls to the bottom of the page.
     */
    function fetchMore() {
        // Only fetch more commits if there are more commits and if we are not already
        // fetching more commits.
        if (commits?.pageInfo.hasNextPage && $commitsQuery && !$commitsQuery.loading) {
            commitsQuery.fetchMore({
                variables: {
                    afterCursor: commits.pageInfo.endCursor,
                },
            })
        }
    }

    /**
     * Restores the previous scroll position when the user refreshes the page. Normally
     * this would bring the user back to the top of the page, but we keep track of how
     * many commits were previously loaded and fetch the missing commits if necessary.
     * It's not ideal because we can only start fetching the remaining data when the
     * component mounts, but it's better than nothing.
     */
    async function restoreCommits(
        commits: CommitsPage_GitCommitConnection | undefined,
        commitCount: number,
        scrollerCapture: ScrollerCapture | undefined
    ) {
        // Fetch more commits to restore the previous scroll position
        if (commits) {
            if (commits.nodes.length < commitCount && !restoring) {
                restoring = true
                await commitsQuery.fetchMore({
                    variables: {
                        afterCursor: commits.pageInfo.endCursor,
                        first: restoredCommitCount - commits.nodes.length,
                    },
                })
                if (scrollerCapture) {
                    scroller.restore(scrollerCapture)
                }
                restoring = false
            }
            restored = true
        }
        commitsQuery?.fetchMore()
    }

    let scroller: Scroller
    // The number of commits that were previously loaded. This only comes into
    // play when the user refreshes the page and thus the Apollo cache is empty.
    let restoredCommitCount: number = 0
    // The previous scroll position. Similarly this is only used when the user
    // refreshes the page.
    let restoredScroller: ScrollerCapture | undefined
    // Restoring a large number of commits can take a while. This flag is used to
    // show a loading spinner instead of the first page of commits while restoring.
    let restoring = false
    // This flag is used to prevent retrying restoring commits in case of unexpected
    // issues with restoring.
    let restored = false
    let commits: CommitsPage_GitCommitConnection | null = null

    $: commitsQuery = data.commitsQuery
    $: commits = $commitsQuery?.data.repository?.commit?.ancestors
    $: if (!restored) {
        restoreCommits(commits, restoredCommitCount, restoredScroller)
    // We conditionally check for the ancestors field to be able to show
    // previously loaded commits when an error occurs while fetching more commits.
    $: if ($commitsQuery?.data?.repository?.commit?.ancestors) {
        commits = $commitsQuery.data.repository.commit.ancestors
    }
</script>

@@ -98,17 +53,27 @@

<section>
    <Scroller bind:this={scroller} margin={600} on:more={fetchMore}>
        {#if commits && !restoring}
        {#if !$commitsQuery.restoring && commits}
            <ul>
                {#each commits.nodes as commit (commit.canonicalURL)}
                    <li><Commit {commit} /></li>
                {:else}
                    <li>
                        <Alert variant="info">No commits found</Alert>
                    </li>
                {/each}
            </ul>
        {/if}
        {#if !$commitsQuery || $commitsQuery.loading || restoring}
        {#if $commitsQuery.fetching || $commitsQuery.restoring}
            <div>
                <LoadingSpinner />
            </div>
        {:else if $commitsQuery.error}
            <div>
                <Alert variant="danger">
                    Unable to fetch commits: {$commitsQuery.error.message}
                </Alert>
            </div>
        {/if}
    </Scroller>
</section>
@@ -120,11 +85,15 @@
        overflow: hidden;
    }

    ul {
        list-style: none;
    ul,
    div {
        padding: 1rem;
        max-width: var(--viewport-xl);
        margin: 0 auto;
    }

    ul {
        list-style: none;
        --avatar-size: 2.5rem;
    }

@@ -1,4 +1,6 @@
import { getGraphQLClient } from '$lib/graphql'
import { from } from 'rxjs'

import { getGraphQLClient, infinityQuery } from '$lib/graphql'
import { resolveRevision } from '$lib/repo/utils'
import { parseRepoRevision } from '$lib/shared'

@@ -7,28 +9,54 @@ import { CommitsPage_CommitsQuery } from './page.gql'

const PAGE_SIZE = 20

export const load: PageLoad = async ({ parent, params }) => {
    const client = await getGraphQLClient()
export const load: PageLoad = ({ parent, params }) => {
    const client = getGraphQLClient()
    const { repoName, revision = '' } = parseRepoRevision(params.repo)
    const resolvedRevision = await resolveRevision(parent, revision)
    const resolvedRevision = resolveRevision(parent, revision)

    const commitsQuery = client.watchQuery({
    const commitsQuery = infinityQuery({
        client,
        query: CommitsPage_CommitsQuery,
        variables: {
            repoName,
            revision: resolvedRevision,
            first: PAGE_SIZE,
            afterCursor: null,
        variables: from(
            resolvedRevision.then(revision => ({
                repoName,
                revision,
                first: PAGE_SIZE,
                afterCursor: null as string | null,
            }))
        ),
        nextVariables: previousResult => {
            if (previousResult?.data?.repository?.commit?.ancestors?.pageInfo?.hasNextPage) {
                return {
                    afterCursor: previousResult.data.repository.commit.ancestors.pageInfo.endCursor,
                }
            }
            return undefined
        },
        combine: (previousResult, nextResult) => {
            if (!nextResult.data?.repository?.commit) {
                return nextResult
            }
            const previousNodes = previousResult.data?.repository?.commit?.ancestors?.nodes ?? []
            const nextNodes = nextResult.data.repository?.commit?.ancestors.nodes ?? []
            return {
                ...nextResult,
                data: {
                    repository: {
                        ...nextResult.data.repository,
                        commit: {
                            ...nextResult.data.repository.commit,
                            ancestors: {
                                ...nextResult.data.repository.commit.ancestors,
                                nodes: [...previousNodes, ...nextNodes],
                            },
                        },
                    },
                },
            }
        },
        notifyOnNetworkStatusChange: true,
    })

    if (!client.readQuery({ query: CommitsPage_CommitsQuery, variables: commitsQuery.variables })) {
        // Eagerly fetch data if it isn't in the cache already. This ensures that the data is fetched
        // as soon as possible, not only after the layout subscribes to the query.
        commitsQuery.refetch()
    }

    return {
        commitsQuery,
    }

@@ -14,12 +14,6 @@ query CommitsPage_CommitsQuery($repoName: String!, $revision: String!, $first: I
    commit(rev: $revision) {
        id
        ancestors(first: $first, afterCursor: $afterCursor) {
            # This is a bit hacky, but by fetching all the data needed by both
            # the history panel and the commits page we ensure that our custom
            # Apollo infinity scroll cache handling for this field works
            # correctly. Eventually we should revisit the use of infinity scroll
            # on the commits page.
            ...HistoryPanel_HistoryConnection
            ...CommitsPage_GitCommitConnection
        }
    }

@@ -3,10 +3,11 @@ import type { GitCommitMock } from '$testing/graphql-type-mocks'
import { expect, test } from '../../../../../testing/integration'

const repoName = 'github.com/sourcegraph/sourcegraph'
const url = `/${repoName}/-/commits`

test.beforeEach(async ({ sg }) => {
    sg.mockOperations({
        ResolveRepoRevison: () => ({
        ResolveRepoRevision: () => ({
            repositoryRedirect: {
                __typename: 'Repository',
                mirrorInfo: {
@@ -20,9 +21,7 @@ test.beforeEach(async ({ sg }) => {
            const to = from ? (first ?? 20) - 5 : first ?? 20
            return {
                repository: {
                    id: '1',
                    commit: {
                        id: '1',
                        ancestors: {
                            nodes: Array.from(
                                { length: to },
@@ -44,23 +43,55 @@ test.beforeEach(async ({ sg }) => {
    })
})

test('infinity scroll', async ({ page }) => {
    await page.goto(`/${repoName}/-/commits`)
test('infinity scroll', async ({ page, utils }) => {
    await page.goto(url)
    // First page of commits is loaded
    const firstCommit = page.getByRole('link', { name: 'Commit 0' })
    await expect(firstCommit).toBeVisible()
    await expect(page.getByRole('link', { name: 'Commit 19' })).toBeVisible()

    // Position mouse over list of commits so that wheel events will scroll
    // the list
    const { x, y } = (await firstCommit.boundingBox()) ?? { x: 0, y: 0 }
    await page.mouse.move(x, y)

    // Scroll list, which should load next page
    await page.mouse.wheel(0, 1000)
    await utils.scrollYAt(firstCommit, 1000)
    await expect(page.getByRole('link', { name: 'Commit 20' })).toBeVisible()

    // Refreshing should restore commit list and scroll position
    await page.reload()
    await expect(page.getByRole('link', { name: 'Commit 20' })).toBeInViewport()
})

test('no commits', async ({ sg, page }) => {
    sg.mockOperations({
        CommitsPage_CommitsQuery: () => ({
            repository: {
                commit: {
                    ancestors: {
                        nodes: [],
                        pageInfo: {
                            endCursor: null,
                            hasNextPage: false,
                        },
                    },
                },
            },
        }),
    })

    await page.goto(url)
    await expect(page.getByText('No commits found')).toBeVisible()
})

test('error', async ({ sg, page, utils }) => {
    await page.goto(url)

    const firstCommit = page.getByRole('link', { name: 'Commit 0' })
    await expect(firstCommit).toBeVisible()

    sg.mockOperations({
        CommitsPage_CommitsQuery: () => {
            throw new Error('Test error')
        },
    })
    // Scroll list, which should trigger an error
    await utils.scrollYAt(firstCommit, 2000)
    await expect(page.getByText('Test error')).toBeVisible()
})

@@ -6,7 +6,7 @@
    import Timestamp from '$lib/Timestamp.svelte'
    import Avatar from '$lib/Avatar.svelte'
    import { createPromiseStore } from '$lib/utils'
    import { Button, ButtonGroup } from '$lib/wildcard'
    import { Alert, Button, ButtonGroup } from '$lib/wildcard'
    import type { ContributorConnection } from './page.gql'

    import type { PageData } from './$types'
@@ -20,15 +20,15 @@
        ['All time', ''],
    ]

    const { pending, latestValue: contributorConnection, set } = createPromiseStore<ContributorConnection | null>()
    $: set(data.contributors)
    const contributorConnection = createPromiseStore<ContributorConnection | null>()
    $: contributorConnection.set(data.contributors)

    // We want to show stale contributors data when the user navigates to
    // the next or previous page for the current time period. When the user
    // changes the time period we want to show a loading indicator instead.
    let currentContributorConnection = $contributorConnection
    $: if (!$pending && $contributorConnection) {
        currentContributorConnection = $contributorConnection
    let currentContributorConnection = $contributorConnection.value
    $: if (!$contributorConnection.pending) {
        currentContributorConnection = $contributorConnection.value
    }

    $: timePeriod = data.after
@@ -68,7 +68,7 @@
        {/each}
    </ButtonGroup>
</form>
{#if !currentContributorConnection && $pending}
{#if !currentContributorConnection && $contributorConnection.pending}
    <div class="mt-3">
        <LoadingSpinner />
    </div>
@@ -90,14 +90,31 @@
                    >
                        <td>{contributor.count} commits</td>
                    </tr>
                {:else}
                    <tr>
                        <td colspan="3">
                            <Alert variant="info">No contributors found</Alert>
                        </td>
                    </tr>
                {/each}
            </tbody>
        </table>
        <div class="d-flex flex-column align-items-center">
            <Paginator disabled={$pending} pageInfo={currentContributorConnection.pageInfo} />
            <p class="mt-1 text-muted">
                <small>Total contributors: {currentContributorConnection.totalCount}</small>
            </p>
        {#if nodes.length > 0}
            <div class="d-flex flex-column align-items-center">
                <Paginator
                    disabled={$contributorConnection.pending}
                    pageInfo={currentContributorConnection.pageInfo}
                />
                <p class="mt-1 text-muted">
                    <small>Total contributors: {currentContributorConnection.totalCount}</small>
                </p>
            </div>
        {/if}
    {:else if $contributorConnection.error}
        <div class="mt-2">
            <Alert variant="danger">
                Unable to load contributors: {$contributorConnection.error.message}
            </Alert>
        </div>
    {/if}
</div>
@@ -118,6 +135,7 @@

    table {
        border-collapse: collapse;
        width: 100%;
    }

    td {

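Editor's note: this page switches `createPromiseStore` from returning destructured `pending`/`value` stores to a single store whose value carries `pending`, `value`, and `error`. A minimal sketch of that new shape, inferred from the usage above (the real helper in `$lib/utils` may differ):

import { writable, type Readable } from 'svelte/store'

interface PromiseState<T> {
    pending: boolean
    value?: T
    error?: Error
}

export interface PromiseStore<T> extends Readable<PromiseState<T>> {
    set(promise: Promise<T>): void
}

export function createPromiseStore<T>(): PromiseStore<T> {
    const store = writable<PromiseState<T>>({ pending: true })
    let latest: Promise<T> | undefined
    return {
        subscribe: store.subscribe,
        set(promise) {
            latest = promise
            // Keep the previous value around so pages can show stale data
            // while the next page for the same time period loads.
            store.update(state => ({ ...state, pending: true }))
            promise.then(
                value => {
                    // Ignore results from superseded promises.
                    if (latest === promise) {
                        store.set({ pending: false, value })
                    }
                },
                error => {
                    if (latest === promise) {
                        store.set({ pending: false, error })
                    }
                }
            )
        },
    }
}
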
@@ -1,4 +1,4 @@
import { getGraphQLClient } from '$lib/graphql'
import { getGraphQLClient, mapOrThrow } from '$lib/graphql'
import { getPaginationParams } from '$lib/Paginator'
import { parseRepoRevision } from '$lib/shared'

@@ -7,29 +7,24 @@ import { ContributorsPage_ContributorsQuery } from './page.gql'

const pageSize = 20

export const load: PageLoad = async ({ url, params }) => {
export const load: PageLoad = ({ url, params }) => {
    const afterDate = url.searchParams.get('after') ?? ''
    const { first, last, before, after } = getPaginationParams(url.searchParams, pageSize)
    const client = await getGraphQLClient()
    const client = getGraphQLClient()
    const { repoName } = parseRepoRevision(params.repo)

    const contributors = client
        .query({
            query: ContributorsPage_ContributorsQuery,
            variables: {
                afterDate,
                repoName,
                revisionRange: '',
                path: '',
                first,
                last,
                after,
                before,
            },
        })
        .then(result => {
            return result.data.repository?.contributors ?? null
        .query(ContributorsPage_ContributorsQuery, {
            afterDate,
            repoName,
            revisionRange: '',
            path: '',
            first,
            last,
            after,
            before,
        })
        .then(mapOrThrow(result => result.data?.repository?.contributors ?? null))
    return {
        after: afterDate,
        contributors,

@@ -0,0 +1,101 @@
import { test, expect } from '../../../../../../testing/integration'

const repoName = 'sourcegraph/sourcegraph'
const url = `/${repoName}/-/stats/contributors`

test.beforeEach(async ({ sg }) => {
    sg.mockOperations({
        ResolveRepoRevision: () => ({
            repositoryRedirect: {
                __typename: 'Repository',
                mirrorInfo: {
                    cloned: true,
                    cloneInProgress: false,
                },
            },
        }),
        ContributorsPage_ContributorsQuery: ({ after, before }) => {
            const allNodes = Array.from({ length: 15 }, (_, index) => ({
                _id: (index + 1).toString(),
                person: { displayName: `Person ${index + 1}` },
            }))

            let startCursor = '1'
            let endCursor = '5'
            let nodes: any[] = allNodes.slice(0, 5)

            if (after) {
                const index = allNodes.findIndex(node => node._id === after)
                startCursor = allNodes[index + 1]._id
                endCursor = allNodes[index + 5]._id
                nodes = allNodes.slice(index + 1, index + 6)
            } else if (before) {
                const index = allNodes.findIndex(node => node._id === before)
                startCursor = allNodes[index - 1]._id
                endCursor = allNodes[index - 5]._id
                nodes = allNodes.slice(index - 5, index)
            }

            const pageInfo = {
                startCursor,
                endCursor,
                hasNextPage: endCursor !== allNodes[nodes.length - 1]._id,
                hasPreviousPage: startCursor !== allNodes[0]._id,
            }

            return {
                repository: {
                    contributors: {
                        nodes,
                        pageInfo,
                        totalCount: allNodes.length,
                    },
                },
            }
        },
    })
})

test('paginate contributors', async ({ page }) => {
    await page.goto(url)
    await expect(page.getByRole('row')).toHaveCount(5)

    // Go to next page
    await page.getByRole('link', { name: 'Next' }).click()
    await expect(page.getByText('Person 6')).toBeVisible()

    // Go to next page
    await page.getByRole('link', { name: 'Next' }).click()
    await expect(page.getByText('Person 11')).toBeVisible()

    // Go to previous page
    await page.getByRole('link', { name: 'Previous' }).click()
    await expect(page.getByText('Person 6')).toBeVisible()
})

test('no contributors', async ({ sg, page }) => {
    sg.mockOperations({
        ContributorsPage_ContributorsQuery: () => ({
            repository: {
                contributors: {
                    nodes: [],
                    totalCount: 0,
                },
            },
        }),
    })

    await page.goto(url)
    await expect(page.getByText('No contributors found')).toBeVisible()
})

test('error', async ({ sg, page }) => {
    sg.mockOperations({
        ContributorsPage_ContributorsQuery: () => {
            throw new Error('Test error')
        },
    })

    await page.goto(url)
    await expect(page.getByText(/Test error/)).toBeVisible()
})
@@ -1,18 +1,11 @@
<script lang="ts">
    import LoadingSpinner from '$lib/LoadingSpinner.svelte'
    import GitReference from '$lib/repo/GitReference.svelte'
    import { createPromiseStore } from '$lib/utils'
    import type { GitTagsConnection } from './page.gql'
    import { Alert } from '$lib/wildcard'

    import type { PageData } from './$types'

    export let data: PageData

    const { pending, value: connection, set } = createPromiseStore<GitTagsConnection>()
    $: set(data.tags)

    $: nodes = $connection?.nodes
    $: total = $connection?.totalCount
</script>

<svelte:head>
@@ -21,22 +14,28 @@

<section>
    <div>
        {#if $pending}
        {#await data.tags}
            <LoadingSpinner />
        {:else if nodes}
        {:then connection}
            <!-- TODO: Search input to filter tags by name -->
            <!-- TODO: Pagination -->
            <table>
                <tbody>
                    {#each nodes as node (node.id)}
                    {#each connection.nodes as node (node.id)}
                        <GitReference ref={node} />
                    {:else}
                        <tr>
                            <td colspan="2">
                                <Alert variant="info">No tags found</Alert>
                            </td>
                        </tr>
                    {/each}
                </tbody>
            </table>
            {#if total !== null}
                <small class="text-muted">{total} tags total</small>
            {/if}
        {/if}
            <small class="text-muted">{connection.totalCount} tags total</small>
        {:catch error}
            <Alert variant="danger">{error.message}</Alert>
        {/await}
    </div>
</section>

@@ -1,28 +1,27 @@
import { getGraphQLClient } from '$lib/graphql'
import { getGraphQLClient, mapOrThrow } from '$lib/graphql'
import { parseRepoRevision } from '$lib/shared'

import type { PageLoad } from './$types'
import { TagsPage_TagsQuery } from './page.gql'

export const load: PageLoad = async ({ params }) => {
    const client = await getGraphQLClient()
export const load: PageLoad = ({ params }) => {
    const client = getGraphQLClient()
    const { repoName } = parseRepoRevision(params.repo)

    return {
        tags: client
            .query({
                query: TagsPage_TagsQuery,
                variables: {
                    repoName,
                    first: 20,
                    withBehindAhead: false,
                },
            .query(TagsPage_TagsQuery, {
                repoName,
                first: 20,
                withBehindAhead: false,
            })
            .then(result => {
                if (!result.data.repository) {
                    throw new Error('Expected Repository')
                }
                return result.data.repository.gitRefs
            }),
            .then(
                mapOrThrow(result => {
                    if (!result.data?.repository) {
                        throw new Error('Unable to load repository data.')
                    }
                    return result.data.repository.gitRefs
                })
            ),
    }
}

@@ -0,0 +1,66 @@
import { test, expect } from '../../../../../testing/integration'

const repoName = 'sourcegraph/sourcegraph'

test.beforeEach(async ({ sg }) => {
    sg.mockOperations({
        ResolveRepoRevision: () => ({
            repositoryRedirect: {
                __typename: 'Repository',
                mirrorInfo: {
                    cloned: true,
                    cloneInProgress: false,
                },
            },
        }),
    })
})

test('list tags', async ({ sg, page }) => {
    sg.mockOperations({
        TagsPage_TagsQuery: () => ({
            repository: {
                gitRefs: {
                    nodes: [{ displayName: 'v1.0.0', url: `/${repoName}@v1.0.0` }, { displayName: 'v1.0.1' }],
                    totalCount: 42,
                },
            },
        }),
    })

    await page.goto(`/${repoName}/-/tags`)
    await expect(page.getByRole('link', { name: 'v1.0.0' })).toBeVisible()
    await expect(page.getByRole('link', { name: 'v1.0.1' })).toBeVisible()
    await expect(page.getByText('42 tags total')).toBeVisible()

    // Click on a tag
    await page.getByRole('link', { name: 'v1.0.0' }).click()
    await expect(page).toHaveURL(`/${repoName}@v1.0.0`)
})

test('no tags', async ({ sg, page }) => {
    sg.mockOperations({
        TagsPage_TagsQuery: () => ({
            repository: {
                gitRefs: {
                    nodes: [],
                    totalCount: 0,
                },
            },
        }),
    })

    await page.goto(`/${repoName}/-/tags`)
    await expect(page.getByText('No tags found')).toBeVisible()
})

test('error', async ({ sg, page }) => {
    sg.mockOperations({
        TagsPage_TagsQuery: () => {
            throw new Error('Test error')
        },
    })

    await page.goto(`/${repoName}/-/tags`)
    await expect(page.getByText('Test error')).toBeVisible()
})
@@ -1,11 +1,27 @@
<script lang="ts">
    import { page } from '$app/stores'
    import { isRevisionNotFoundErrorLike } from '$lib/shared'
    import { Alert } from '$lib/wildcard'

    import RevisionNotFoundError from './RevisionNotFoundError.svelte'
</script>

{#if isRevisionNotFoundErrorLike($page.error)}
    <RevisionNotFoundError />
{:else if $page.error}
    <div>
        <Alert variant="danger">
            <p>An error occurred while loading the page:</p>
            {$page.error.message}
        </Alert>
    </div>
{/if}
<!-- show error information for other errors -->

<style lang="scss">
    div {
        max-width: var(--viewport-xl);
        margin-left: auto;
        margin-right: auto;
        margin-top: 5rem;
    }
</style>

@@ -17,13 +17,13 @@ import type { LayoutLoad } from './$types'
import { ResolveRepoRevision, ResolvedRepository, type ResolveRepoRevisionResult } from './layout.gql'

export interface ResolvedRevision {
    repo: ResolvedRepository
    repo: ResolvedRepository & { commit: NonNullable<ResolvedRepository['commit']> }
    commitID: string
    defaultBranch: string
}

export const load: LayoutLoad = async ({ params, url, depends }) => {
    const client = await getGraphQLClient()
    const client = getGraphQLClient()

    // This allows other places to reload all repo related data by calling
    // invalidate('repo:root')
@@ -84,24 +84,10 @@ async function resolveRepoRevision({
    revision?: string
}): Promise<ResolvedRevision> {
    // See if we have a cached response
    let data = client.readQuery({
        query: ResolveRepoRevision,
        variables: {
            repoName,
            revision,
        },
    })
    let data = client.readQuery(ResolveRepoRevision, { repoName, revision })?.data

    if (shouldResolveRepositoryInformation(data)) {
        data = await client
            .query({
                query: ResolveRepoRevision,
                variables: {
                    repoName,
                    revision,
                },
                fetchPolicy: 'network-only',
            })
            .then(result => result.data)
        data = (await client.query(ResolveRepoRevision, { repoName, revision }, { requestPolicy: 'network-only' })).data
    }

    if (!data?.repositoryRedirect) {
@@ -150,7 +136,7 @@ async function resolveRepoRevision({
 * corresponding commit ID only once.
 * This ensures consistency as the user navigates to and away from the repository page.
 */
function shouldResolveRepositoryInformation(data: ResolveRepoRevisionResult | null): boolean {
function shouldResolveRepositoryInformation(data: ResolveRepoRevisionResult | undefined): boolean {
    if (!data) {
        return true
    }

@@ -18,7 +18,7 @@ test.beforeEach(({ sg }) => {
test.describe('cloned repository', () => {
    test.beforeEach(async ({ sg, page }) => {
        sg.mockOperations({
            ResolveRepoRevison: ({ repoName }) => ({
            ResolveRepoRevision: ({ repoName }) => ({
                repositoryRedirect: {
                    id: '1',
                    name: repoName,
@@ -40,7 +40,7 @@ test.describe('cloned repository', () => {

test('clone in progress', async ({ sg, page }) => {
    sg.mockOperations({
        ResolveRepoRevison: ({ repoName }) => ({
        ResolveRepoRevision: ({ repoName }) => ({
            repositoryRedirect: {
                id: '1',
                name: repoName,
@@ -62,7 +62,7 @@ test('clone in progress', async ({ sg, page }) => {

test('not cloned', async ({ sg, page }) => {
    sg.mockOperations({
        ResolveRepoRevison: ({ repoName }) => ({
        ResolveRepoRevision: ({ repoName }) => ({
            repositoryRedirect: {
                id: '1',
                name: repoName,

@@ -25,7 +25,6 @@
    import SymbolSearchResult from './SymbolSearchResult.svelte'
    import { createTemporarySettingsStorage } from '$lib/temporarySettings'
    import { setSearchResultsContext } from './searchResultsContext'
    import { createTestGraphqlClient } from '$testing/graphql'

    export const meta = {
        title: 'search/SearchResults',
@@ -47,7 +46,6 @@
        settings: readable({}),
        featureFlags: readable([]),
        temporarySettingsStorage: createTemporarySettingsStorage(),
        client: readable(createTestGraphqlClient()),
    })

    setSearchResultsContext({

@@ -253,21 +253,37 @@ describe('custom mocks', () => {
            }),
        },
    })
    server.addOperationMocks({ customOperation: () => ({ currentUser: { name: 'custom' } }) })
    server.addOperationMocks({
        customOperation: () => ({ currentUser: { name: 'custom', friends: [{ name: 'friend1' }] } }),
    })
    server.addTypeMocks({ User: () => ({ name: 'user2' }) })

    expect(server.query(`query {currentUser {name age}}`)).toMatchObject({
        data: { currentUser: { name: 'user2', age: 42 } },
    expect(server.query(`query {currentUser {name age friends {name age}}}`)).toMatchObject({
        data: { currentUser: { name: 'user2', age: 42, friends: [{ name: 'user2', age: 42 }] } },
    })

    expect(server.query(`query customOperation {currentUser {name age}}`)).toMatchObject({
        data: { currentUser: { name: 'custom', age: 42 } },
    expect(server.query(`query customOperation {currentUser {name age friends {name age}}}`)).toMatchObject({
        data: { currentUser: { name: 'custom', age: 42, friends: [{ name: 'friend1', age: 42 }] } },
    })
})

test('override partial non-id object', () => {
    const server = new GraphQLMockServer({
        schema,
        mocks: {
            Action: () => ({
                name: 'default',
            }),
        },
    })
    server.addOperationMocks({
        customOperation: () => ({
            action: { name: 'custom' },
        }),
    })

    server.addTypeMocks({ User: () => ({ friends: [{ name: 'friend1' }] }) })

    expect(server.query(`query {currentUser { friends {name age}}}`)).toMatchObject({
        data: { currentUser: { friends: [{ name: 'friend1', age: 42 }] } },
    expect(server.query(`query customOperation {action {name}}`)).toMatchObject({
        data: { action: { name: 'custom' } },
    })
})
})

@@ -421,20 +421,20 @@ export class GraphQLMockServer {
        return `${type.name}:${keyValue}`
    }

    private resolveObject(type: GraphQLObjectType, value: ObjectMock = {}): ObjectMock {
    private resolveObject(type: GraphQLObjectType, override: ObjectMock = {}): ObjectMock {
        const keyFieldName = this.options.typePolicies?.[type.name]?.keyField ?? 'id'
        let keyFieldType = type.getFields()[keyFieldName]?.type
        if (isNonNullType(keyFieldType)) {
            keyFieldType = keyFieldType.ofType
        }
        if (!keyFieldType || !isScalarType(keyFieldType)) {
            return value
            return { ...override }
        }
        const key = value[keyFieldName]
        const key = override[keyFieldName]
        const cacheKey = this.getCacheKey(type, String(key))

        const obj = this.objectStore.get(cacheKey)
        return obj ? { ...obj, ...value } : { ...value }
        return obj ? { ...obj, ...override } : { ...override }
    }

    /**

@@ -1,25 +1,32 @@
import { readFileSync } from 'node:fs'
import path from 'node:path'
import { fileURLToPath } from 'node:url'
import path from 'path'

import { faker } from '@faker-js/faker'
import { test as base, type Page } from '@playwright/test'
import { test as base, type Page, type Locator } from '@playwright/test'
import glob from 'glob'
import { buildSchema } from 'graphql'

import { GraphQLMockServer } from './graphql-mocking'
import type { TypeMocks, ObjectMock, UserMock, OperationMocks } from './graphql-type-mocks'

export { expect, defineConfig } from '@playwright/test'
export { expect, defineConfig, type Locator, type Page } from '@playwright/test'

const defaultMocks: TypeMocks = {
    Query: () => ({
        // null means not signed in
        currentUser: null,
    }),
    Person: () => ({
        avatarURL: null,
    }),
    Person: () => {
        const firstName = faker.person.firstName()
        const lastName = faker.person.lastName()
        return {
            name: `${firstName} ${lastName}`,
            email: faker.internet.email({ firstName, lastName }),
            displayName: faker.internet.userName({ firstName, lastName }),
            avatarURL: null,
        }
    },
    User: () => ({
        avatarURL: null,
    }),
@@ -37,12 +44,16 @@ const defaultMocks: TypeMocks = {
            lsif: '{}',
        },
    }),
    GitRef: () => ({
        url: faker.internet.url(),
    }),
    Signature: () => ({
        date: faker.date.past().toISOString(),
    }),
    GitObjectID: () => faker.git.commitSha(),
    GitCommit: () => ({
        abbreviatedOID: faker.git.commitSha({ length: 7 }),
        subject: faker.git.commitMessage(),
    }),
    JSONCString: () => '{}',
}
@@ -117,7 +128,24 @@ class Sourcegraph {
    }
}

export const test = base.extend<{ sg: Sourcegraph }, { graphqlMock: GraphQLMockServer }>({
interface Utils {
    scrollYAt(locator: Locator, distance: number): Promise<void>
}

export const test = base.extend<{ sg: Sourcegraph; utils: Utils }, { graphqlMock: GraphQLMockServer }>({
    utils: async ({ page }, use) => {
        use({
            async scrollYAt(locator: Locator, distance: number): Promise<void> {
                // Position the mouse over the target so that wheel events scroll
                // the container that contains the target
                const { x, y } = (await locator.boundingBox()) ?? { x: 0, y: 0 }
                await page.mouse.move(x, y)

                // Scroll list, which should load next page
                await page.mouse.wheel(0, distance)
            },
        })
    },
    sg: [
        async ({ page, graphqlMock }, use) => {
            const sg = new Sourcegraph(page, graphqlMock)
@@ -136,6 +164,9 @@ export const test = base.extend<{ sg: Sourcegraph }, { graphqlMock: GraphQLMockS
            GitBlob: {
                keyField: 'canonicalURL',
            },
            GitTree: {
                keyField: 'canonicalURL',
            },
        },
    })
    await use(graphqlMock)

@@ -61,7 +61,6 @@ const mockedSourcgraphContext: {
    [key in SourcegraphContextKey]: MockedSourcegraphContextValue<SourcegraphContext[key]> | typeof unmocked
} = {
    user: writable(null),
    client: unmocked,
    settings: writable({}),
    featureFlags: writable([]),
    temporarySettingsStorage: unmocked,

@@ -1411,6 +1411,9 @@ importers:
      '@sourcegraph/wildcard':
        specifier: workspace:*
        version: link:../wildcard
      '@urql/core':
        specifier: ^4.2.3
        version: 4.2.3(graphql@15.4.0)
      highlight.js:
        specifier: ^10.0.0
        version: 10.7.3
@@ -1420,6 +1423,9 @@ importers:
      ts-key-enum:
        specifier: ^2.0.12
        version: 2.0.12
      wonka:
        specifier: ^6.3.4
        version: 6.3.4
    devDependencies:
      '@faker-js/faker':
        specifier: ^8.0.2
@@ -1574,6 +1580,17 @@ importers:

packages:

  /@0no-co/graphql.web@1.0.4(graphql@15.4.0):
    resolution: {integrity: sha512-W3ezhHGfO0MS1PtGloaTpg0PbaT8aZSmmaerL7idtU5F7oCI+uu25k+MsMS31BVFlp4aMkHSrNRxiD72IlK8TA==}
    peerDependencies:
      graphql: ^14.0.0 || ^15.0.0 || ^16.0.0
    peerDependenciesMeta:
      graphql:
        optional: true
    dependencies:
      graphql: 15.4.0
    dev: false

  /@aashutoshrathi/word-wrap@1.2.6:
    resolution: {integrity: sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==}
    engines: {node: '>=0.10.0'}
@@ -3872,7 +3889,7 @@ packages:
      debug: 4.3.4
      espree: 9.6.1
      globals: 13.23.0
      ignore: 5.2.4
      ignore: 5.3.0
      import-fresh: 3.3.0
      js-yaml: 4.1.0
      minimatch: 3.1.2
@@ -11434,6 +11451,15 @@ packages:
  /@ungap/structured-clone@1.2.0:
    resolution: {integrity: sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==}

  /@urql/core@4.2.3(graphql@15.4.0):
    resolution: {integrity: sha512-DJ9q9+lcs5JL8DcU2J3NqsgeXYJva+1+Qt8HU94kzTPqVOIRRA7ouvy4ksUfPY+B5G2PQ+vLh+JJGyZCNXv0cg==}
    dependencies:
      '@0no-co/graphql.web': 1.0.4(graphql@15.4.0)
      wonka: 6.3.4
    transitivePeerDependencies:
      - graphql
    dev: false

  /@visx/annotation@2.10.0(react-dom@18.1.0)(react@18.1.0):
    resolution: {integrity: sha512-r2szuvO5/J0sxqyrqXU0Vusozgm5M0XmyFXRzq8aag0JG+ifigtlClDFPPwFatuIDaZ9hgcCcF696dFM6zw62w==}
    peerDependencies:
@@ -17279,11 +17305,6 @@ packages:
    engines: {node: '>= 4'}
    dev: true

  /ignore@5.2.4:
    resolution: {integrity: sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==}
    engines: {node: '>= 4'}
    dev: true

  /ignore@5.3.0:
    resolution: {integrity: sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==}
    engines: {node: '>= 4'}
@@ -25475,6 +25496,10 @@ packages:
      execa: 1.0.0
    dev: true

  /wonka@6.3.4:
    resolution: {integrity: sha512-CjpbqNtBGNAeyNS/9W6q3kSkKE52+FjIj7AkFlLr11s/VWGUu6a2CdYSdGxocIhIVjaW/zchesBQUKPVU69Cqg==}
    dev: false

  /word-wrap@1.2.3:
    resolution: {integrity: sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==}
    engines: {node: '>=0.10.0'}