SQLite backend (#5332)
This replaces the original proof-of-concept backend, which simply reused the code from Microsoft's [vscode extension](https://github.com/microsoft/vscode-lsif-extension/tree/master/server). That backend held all LSIF dumps in memory, which was untenable at scale. The new server uses SQLite databases to store the LSIF data for each repository and commit pair.
This commit is contained in:
parent
27aba513d8
commit
71ef03a178
2
.github/CODEOWNERS
vendored
@ -236,7 +236,7 @@ README.md @sqs
/cmd/frontend/db/discussion* @slimsag

# LSIF
/lsif/ @chrismwendt @sourcegraph/code-nav
/lsif/ @chrismwendt @efritz @sourcegraph/code-nav

# Development
/dev/fakehub @ijt
@ -23,8 +23,10 @@ module.exports = api => {
'@babel/plugin-syntax-dynamic-import',
'babel-plugin-lodash',

// Required to support typeorm decorators in ./lsif
['@babel/plugin-proposal-decorators', { legacy: true }],
// Node 12 (released 2019 Apr 23) supports these natively, so we can remove this plugin soon.
'@babel/plugin-proposal-class-properties',
['@babel/plugin-proposal-class-properties', { loose: true }],
],
}
}
@ -44,7 +44,7 @@ RUN apk update && apk add --no-cache \
    # https://github.com/sourcegraph/sourcegraph/blob/master/doc/dev/postgresql.md#version-requirements
    'bash=4.4.19-r1' 'postgresql-contrib=11.5-r0' 'postgresql=11.5-r0' \
    'redis=3.2.12-r0' bind-tools ca-certificates git@edge \
    mailcap nginx openssh-client pcre su-exec tini nodejs=10.14.2-r0
    mailcap nginx openssh-client pcre su-exec tini nodejs-current=11.3.0-r0

# IMPORTANT: If you update the syntect_server version below, you MUST confirm
# the ENV variables from its Dockerfile (https://github.com/sourcegraph/syntect_server/blob/master/Dockerfile)
@ -54,6 +54,8 @@ COPY --from=comby/comby:0.7.0 /usr/local/bin/comby /usr/local/bin/comby
# hadolint ignore=DL3022
COPY --from=sourcegraph/syntect_server:5e1efbb@sha256:6ec136246b302a6c8fc113f087a66d5f9a89a9f5b851e9abb917c8b5e1d8c4b1 /syntect_server /usr/local/bin/
COPY --from=ctags /usr/local/bin/universal-* /usr/local/bin/
# hadolint ignore=DL3022
COPY --from=sourcegraph/lsif-server:ci /lsif /lsif

# hadolint ignore=DL3022
COPY --from=sourcegraph/prometheus:v2.12.0 /bin/prometheus /usr/local/bin
@ -52,9 +52,7 @@ echo "--- build sqlite for symbols"
env CTAGS_D_OUTPUT_PATH="$OUTPUT/.ctags.d" SYMBOLS_EXECUTABLE_OUTPUT_PATH="$bindir/symbols" BUILD_TYPE=dist ./cmd/symbols/build.sh buildSymbolsDockerImageDependencies

echo "--- build lsif-server"
yarn --cwd lsif/server --frozen-lockfile
yarn --cwd lsif/server run build
cp lsif/server/out/http-server.bundle.js "$OUTPUT/lsif-server.js"
IMAGE=sourcegraph/lsif-server:ci ./lsif/build.sh

echo "--- prometheus config"
cp -r docker-images/prometheus/config "$OUTPUT/sg_config_prometheus"
@ -144,7 +144,7 @@ func Main() {
`gitserver: gitserver`,
`query-runner: query-runner`,
`symbols: symbols`,
`lsif-server: node /lsif-server.js | grep -v 'Listening for HTTP requests'`,
`lsif-server: node /lsif/out/server.js`,
`management-console: management-console`,
`searcher: searcher`,
`github-proxy: github-proxy`,
@ -17,6 +17,6 @@ management-console-web: cd ./cmd/management-console/web && ./serve.sh
keycloak: ./dev/auth-provider/keycloak.sh
# jaeger: docker run --name=jaeger --rm -p5775:5775/udp -p6831:6831/udp -p6832:6832/udp -p5778:5778 -p16686:16686 -p14268:14268 jaegertracing/all-in-one:latest
docsite: .bin/docsite -config doc/docsite.json serve -http=localhost:5080
lsif-server: yarn --cwd lsif/server run serve
lsif-server: yarn --cwd lsif run serve
prometheus: ./dev/prometheus.sh
grafana: ./dev/grafana.sh
@ -4,8 +4,7 @@ set -e

echo "--- yarn"
yarn --frozen-lockfile --network-timeout 60000
yarn --cwd lsif/server --frozen-lockfile --network-timeout 60000
yarn --cwd lsif/extension --frozen-lockfile --network-timeout 60000
yarn --cwd lsif --frozen-lockfile --network-timeout 60000

for cmd in "$@"
do
17
dev/ci/yarn-test-separate.sh
Executable file
@ -0,0 +1,17 @@
#!/usr/bin/env bash

set -e

echo "--- yarn in root"
yarn --frozen-lockfile --network-timeout 60000

cd $1
echo "--- yarn"
yarn --frozen-lockfile --network-timeout 60000

echo "--- test"

# Limit the number of workers to prevent the default of 1 worker per core from
# causing OOM on the buildkite nodes that have 96 CPUs. 4 matches the CPU limits
# in infrastructure/kubernetes/ci/buildkite/buildkite-agent/buildkite-agent.Deployment.yaml
yarn -s run test --maxWorkers 4 --verbose
@ -4,6 +4,6 @@ set -e
unset CDPATH
cd "$(dirname "${BASH_SOURCE[0]}")/.." # cd to repo root dir

for dir in web shared browser packages/sourcegraph-extension-api packages/@sourcegraph/extension-api-types lsif/server lsif/extension; do
for dir in web shared browser packages/sourcegraph-extension-api packages/@sourcegraph/extension-api-types lsif; do
    (set -x; cd "$dir" && "$@")
done
@ -111,7 +111,7 @@ export PATH="$PWD/.bin:$PWD/node_modules/.bin:$PATH"

# LSIF server
[ -n "${OFFLINE-}" ] || {
    pushd ./lsif/server && yarn --no-progress && popd
    pushd ./lsif && yarn --no-progress && popd
}

printf >&2 "\nStarting all binaries...\n\n"
@ -73,6 +73,13 @@ func addBrowserExt(pipeline *bk.Pipeline) {
        bk.ArtifactPaths("browser/coverage/coverage-final.json"))
}

// Tests the LSIF server.
func addLSIFServer(pipeline *bk.Pipeline) {
    pipeline.AddStep(":jest:",
        bk.Cmd("dev/ci/yarn-test-separate.sh lsif"),
        bk.ArtifactPaths("lsif/coverage/coverage-final.json"))
}

// Adds the shared frontend tests (shared between the web app and browser extension).
func addSharedTests(pipeline *bk.Pipeline) {
    // Shared tests
@ -246,7 +253,7 @@ func addDockerImage(c Config, app string, insiders bool) func(*bk.Pipeline) {

    cmdDir := func() string {
        cmdDirByApp := map[string]string{
            "lsif-server": "lsif/server",
            "lsif-server": "lsif",
        }
        if cmdDir, ok := cmdDirByApp[app]; ok {
            return cmdDir
@ -77,6 +77,7 @@ func GeneratePipeline(c Config) (*bk.Pipeline, error) {
    addLint,
    addBrowserExt,
    addWebApp,
    addLSIFServer,
    addSharedTests,
    addPostgresBackcompat,
    addGoTests,
@ -6,5 +6,5 @@ const config = require('./jest.config.base')
/** @type {jest.InitialOptions} */
module.exports = {
    ...config,
    projects: ['browser/jest.config.js', 'shared/jest.config.js', 'web/jest.config.js'],
    projects: ['browser/jest.config.js', 'shared/jest.config.js', 'web/jest.config.js', 'lsif/jest.config.js'],
}
30
lsif/Dockerfile
Normal file
@ -0,0 +1,30 @@
FROM alpine:3.9@sha256:644fcb1a676b5165371437feaa922943aaf7afcfa8bfee4472f6860aad1ef2a0 AS builder

RUN apk add --no-cache nodejs-current=11.3.0-r0 nodejs-npm=10.14.2-r0
RUN npm install -g yarn@1.17.3

COPY package.json yarn.lock /lsif/
RUN yarn --cwd /lsif

COPY tsconfig.json /lsif
COPY src /lsif/src
RUN yarn --cwd /lsif run build

FROM alpine:3.9@sha256:644fcb1a676b5165371437feaa922943aaf7afcfa8bfee4472f6860aad1ef2a0

ARG COMMIT_SHA="unknown"
ARG DATE="unknown"
ARG VERSION="unknown"

LABEL org.opencontainers.image.revision=${COMMIT_SHA}
LABEL org.opencontainers.image.created=${DATE}
LABEL org.opencontainers.image.version=${VERSION}
LABEL com.sourcegraph.github.url=https://github.com/sourcegraph/sourcegraph/commit/${COMMIT_SHA}

# hadolint ignore=DL3018
RUN apk add --no-cache tini nodejs-current=11.3.0-r0

COPY --from=builder /lsif /lsif

EXPOSE 3186
ENTRYPOINT ["/sbin/tini", "--", "node", "/lsif/out/server.js"]
@ -2,10 +2,31 @@

[LSIF](https://code.visualstudio.com/blogs/2019/02/19/lsif) is a file format that stores code intelligence information such as hover docstrings, definitions, and references.

Sourcegraph receives and stores LSIF files uploaded using [upload-lsif.sh](upload-lsif.sh), then uses that information to provide fast and precise code intelligence when viewing files.
Sourcegraph receives and stores LSIF files uploaded using [upload-lsif.sh](upload-lsif.sh) (usually used in CI, similar to [Codecov's Bash Uploader](https://docs.codecov.io/docs/about-the-codecov-bash-uploader)), then uses that information to provide fast and precise code intelligence when viewing files.

In this directory:
The HTTP [server](src) runs behind Sourcegraph (for auth) and receives and stores LSIF dump uploads and services requests for relevant LSP queries.

- [upload-lsif.sh](upload-lsif.sh): a script that uploads an LSIF file to Sourcegraph (usually used in CI, similar to [Codecov's Bash Uploader](https://docs.codecov.io/docs/about-the-codecov-bash-uploader))
- [server/](server/): an HTTP server which runs behind Sourcegraph (for auth), receives and stores LSIF file uploads, and services requests for hovers/defs/refs
- [extension/](extension/): a Sourcegraph extension which sends requests to the Sourcegraph instance at `.api/lsif` for hovers/defs/refs
## API

### `/upload`

Receives an LSIF dump encoded as JSON lines.

URL query parameters:

- `repository`: the name of the repository (e.g. `github.com/sourcegraph/codeintellify`)
- `commit`: the 40 character hash of the commit

The request body must be HTML form data with a single file (e.g. `curl -F "data=@file.lsif" ...`).

### `/request`

Performs a `hover`, a `definitions`, or a `references` request for the given repository@commit and returns the result. Fails if there is no LSIF data for the given repository@commit.

The request body must be a JSON object with these properties:

- `repository`: the name of the repository (e.g. `github.com/sourcegraph/codeintellify`)
- `commit`: the 40 character hash of the commit
- `method`: `hover`, `definitions`, or `references`
- `path`: the file path in the repository
- `position`: the zero-based `{ line, character }` in the file at which the request is being made
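For illustration only (not part of the committed README), a hover query against the `/request` contract described above could be issued roughly like this; the server URL and the example file path/position are assumptions (behind Sourcegraph the route is proxied at `.api/lsif/request`):

```typescript
// Minimal sketch of a hover query against the /request endpoint described above.
// The server URL, file path, and position are made up for illustration.
async function lsifHover(lsifServerUrl: string): Promise<unknown> {
    const response = await fetch(`${lsifServerUrl}/request`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            repository: 'github.com/sourcegraph/codeintellify',
            commit: 'c21c0da7b2a6cacafcbf90c85a81bf432020ad9b',
            method: 'hover',
            path: 'src/index.ts', // hypothetical file path
            position: { line: 10, character: 5 }, // zero-based
        }),
    })
    if (!response.ok) {
        throw new Error(`LSIF /request returned ${response.statusText}`)
    }
    return response.json()
}
```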
8
lsif/babel.config.js
Normal file
@ -0,0 +1,8 @@
// @ts-check

/** @type {import('@babel/core').TransformOptions} */
const config = {
    extends: '../babel.config.js',
}

module.exports = config
9
lsif/build.sh
Executable file
@ -0,0 +1,9 @@
#!/usr/bin/env bash

cd "$(dirname "${BASH_SOURCE[0]}")/.."
set -ex

docker build -f lsif/Dockerfile -t "$IMAGE" lsif \
    --build-arg COMMIT_SHA \
    --build-arg DATE \
    --build-arg VERSION
228
lsif/docs/database.md
Normal file
@ -0,0 +1,228 @@
# LSIF data model

This document outlines the data model for a single LSIF dump. The definitions of the database tables and the entities encoded within them can be found in `models.database.ts`.

In what follows, we collapse ranges to keep the document readable, where `a:b-c:d` is shorthand for the following:

```
{
    "startLine": a,
    "startCharacter": b,
    "endLine": c,
    "endCharacter": d
}
```

This applies to JSON payloads, and a similar shorthand is used for the columns of the `definitions` and `references` tables.
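For example, the range `0:16-0:19` used below (the `foo` identifier on the first line of `foo.ts`) expands to:

```
{
    "startLine": 0,
    "startCharacter": 16,
    "endLine": 0,
    "endCharacter": 19
}
```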
## Source

The following source files compose the package `sample`, which is used as the running example for this document.

**foo.ts**

```typescript
export function foo(value: string): string {
    return value.substring(1, value.length - 1)
}
```

**bar.ts**

```typescript
import { foo } from './foo'

export function bar(input: string): string {
    return foo(foo(input))
}
```

## Database values

**`meta` table**

This table is populated with **exactly** one row containing the version of the LSIF input, the version of the software that converted it into a SQLite database, and the number used to determine in which result chunk a result identifier belongs (via hash and modulus over the number of chunks). Generally, this number will be the number of rows in the `resultChunks` table, but it may be higher, as we won't insert empty chunks (in the case that no identifier happened to hash to one).

The last value is needed so that hashing an identifier consistently maps it to the correct result chunk row identifier. This is explained in more detail later in this document.

| id  | lsifVersion | sourcegraphVersion | numResultChunks |
| --- | ----------- | ------------------ | --------------- |
| 0   | 0.4.3       | 0.1.0              | 1               |

**`documents` table**

This table is populated with a gzipped JSON payload that represents the ranges as well as each range's definition, reference, and hover result identifiers. The table is indexed on the path of the document relative to the project root.

| path   | data                         |
| ------ | ---------------------------- |
| foo.ts | _gzipped_ and _json-encoded_ |
| bar.ts | _gzipped_ and _json-encoded_ |

Each payload has the following form. As the documents are large, we show only the decoded version for `foo.ts`.

**encoded `foo.ts` payload**
````json
{
  "ranges": {
    "9": {
      "range": "0:0-0:0",
      "definitionResultId": "49",
      "referenceResultId": "52",
      "monikerIds": ["9007199254740990"]
    },
    "14": {
      "range": "0:16-0:19",
      "definitionResultId": "55",
      "referenceResultId": "58",
      "hoverResultId": "16",
      "monikerIds": ["9007199254740987"]
    },
    "21": {
      "range": "0:20-0:25",
      "definitionResultId": "61",
      "referenceResultId": "64",
      "hoverResultId": "23",
      "monikerIds": []
    },
    "25": {
      "range": "1:9-1:14",
      "definitionResultId": "61",
      "referenceResultId": "64",
      "hoverResultId": "23",
      "monikerIds": []
    },
    "36": {
      "range": "1:15-1:24",
      "definitionResultId": "144",
      "referenceResultId": "68",
      "hoverResultId": "34",
      "monikerIds": ["30"]
    },
    "38": {
      "range": "1:28-1:33",
      "definitionResultId": "61",
      "referenceResultId": "64",
      "hoverResultId": "23",
      "monikerIds": []
    },
    "47": {
      "range": "1:34-1:40",
      "definitionResultId": "148",
      "referenceResultId": "71",
      "hoverResultId": "45",
      "monikerIds": []
    }
  },
  "hoverResults": {
    "16": "```typescript\nfunction foo(value: string): string\n```",
    "23": "```typescript\n(parameter) value: string\n```",
    "34": "```typescript\n(method) String.substring(start: number, end?: number): string\n```\n\n---\n\nReturns the substring at the specified location within a String object.",
    "45": "```typescript\n(property) String.length: number\n```\n\n---\n\nReturns the length of a String object."
  },
  "monikers": {
    "9007199254740987": {
      "kind": "export",
      "scheme": "npm",
      "identifier": "sample:foo:foo",
      "packageInformationId": "9007199254740991"
    },
    "9007199254740990": {
      "kind": "export",
      "scheme": "npm",
      "identifier": "sample:foo:",
      "packageInformationId": "9007199254740991"
    }
  },
  "packageInformation": {
    "9007199254740991": {
      "name": "sample",
      "version": "0.1.0"
    }
  }
}
````
The `ranges` field holds a map from range identifier to range data, including the extents within the source code and optional fields for a definition result, a reference result, and a hover result. Each range also has a possibly empty list of moniker ids. The hover result and moniker identifiers index into the `hoverResults` and `monikers` fields of the document. The definition and reference result identifiers index into a result chunk payload, as described below.
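To make the `documents` encoding concrete, here is a minimal sketch of decoding one row and resolving a hover result; the field names follow the payload above, while the function itself is illustrative and not part of the schema:

```typescript
import { gunzipSync } from 'zlib'

interface RangeData {
    range: string
    definitionResultId?: string
    referenceResultId?: string
    hoverResultId?: string
    monikerIds: string[]
}

interface DocumentData {
    ranges: { [id: string]: RangeData }
    hoverResults: { [id: string]: string }
}

// `data` is the gzipped, JSON-encoded payload stored in the `documents` table.
function hoverForRange(data: Buffer, rangeId: string): string | undefined {
    const document: DocumentData = JSON.parse(gunzipSync(data).toString('utf8'))
    const range = document.ranges[rangeId]
    return range && range.hoverResultId !== undefined
        ? document.hoverResults[range.hoverResultId]
        : undefined
}
```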
**`resultChunks` table**

Originally, definition and reference results were stored inline in the document payload. However, this caused document payloads to become massive in some circumstances (for instance, where the reference result of a frequently used symbol includes multiple ranges in every document of the project). In order to keep each row to a manageable and cacheable size, the definition and reference results were moved into a separate table. The size of each result chunk can then be controlled by varying _how many_ result chunks there are available in a database. It may also be worth noting here that hover results and monikers are best left inlined, as normalizing the former would require another SQL lookup on hover queries, and normalizing the latter would require a SQL lookup per moniker attached to a range; normalizing either does not have a large effect on the size of the document payload.

This table is populated with gzipped JSON payloads that contain a mapping from definition result or reference result identifiers to the set of ranges that compose that result. A definition or reference result may be referred to by many documents, which is why it is encoded separately. The table is indexed on the common hash of each definition and reference result id inserted in this chunk.

| id  | data                         |
| --- | ---------------------------- |
| 0   | _gzipped_ and _json-encoded_ |

Each payload has the following form.

**encoded result chunk #0 payload**
```json
{
  "documentPaths": {
    "4": "foo.ts",
    "80": "bar.ts"
  },
  "documentIdRangeIds": {
    "49": [{ "documentId": "4", "rangeId": "9" }],
    "55": [{ "documentId": "4", "rangeId": "4" }],
    "61": [{ "documentId": "4", "rangeId": "21" }],
    "71": [{ "documentId": "4", "rangeId": "47" }],
    "52": [{ "documentId": "4", "rangeId": "9" }, { "documentId": "80", "rangeId": "95" }],
    "58": [
      { "documentId": "4", "rangeId": "14" },
      { "documentId": "80", "rangeId": "91" },
      { "documentId": "80", "rangeId": "111" },
      { "documentId": "80", "rangeId": "113" }
    ],
    "64": [
      { "documentId": "4", "rangeId": "21" },
      { "documentId": "4", "rangeId": "25" },
      { "documentId": "4", "rangeId": "38" }
    ],
    "68": [{ "documentId": "4", "rangeId": "36" }],
    "117": [{ "documentId": "80", "rangeId": "85" }],
    "120": [{ "documentId": "80", "rangeId": "85" }],
    "125": [{ "documentId": "80", "rangeId": "100" }],
    "128": [{ "documentId": "80", "rangeId": "100" }],
    "131": [{ "documentId": "80", "rangeId": "107" }],
    "134": [{ "documentId": "80", "rangeId": "107" }, { "documentId": "80", "rangeId": "115" }]
  }
}
```
For each definition or reference result identifier, the `documentIdRangeIds` field stores a list of _pairs_ of document identifiers and range identifiers. To look up a range in this format, the `documentId` must be translated into a document path via the `documentPaths` field. This gives the primary key of the document containing the range in the `documents` table, and the range identifier can then be looked up in the decoded payload.

To retrieve a definition or reference result by its identifier, we must first determine in which result chunk it is defined. This requires that we take the hash of the identifier, modulo the `numResultChunks` field of the `meta` table. This gives us the unique identifier into the `resultChunks` table. In the running example of this document, there is only one result chunk. Larger dumps will have a greater number of result chunks to keep the amount of data encoded in a single database row reasonable.
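A sketch of that lookup follows. The string hash shown here is a stand-in (this document does not specify the hash the importer actually uses), and the chunk shape mirrors the payload above:

```typescript
// Map a definition/reference result identifier to the id of the result chunk
// row that should contain it. Any stable string hash works for illustration;
// the real implementation's hash function is not shown in this document.
function resultChunkId(resultId: string, numResultChunks: number): number {
    let hash = 0
    for (let i = 0; i < resultId.length; i++) {
        hash = (hash * 31 + resultId.charCodeAt(i)) | 0
    }
    return Math.abs(hash) % numResultChunks
}

// Once the chunk row is fetched and decoded (same gzipped-JSON scheme as the
// document payloads), a result id resolves to concrete document paths and ranges.
interface ResultChunk {
    documentPaths: { [id: string]: string }
    documentIdRangeIds: { [resultId: string]: { documentId: string; rangeId: string }[] }
}

function resolveResult(chunk: ResultChunk, resultId: string): { path: string; rangeId: string }[] {
    return (chunk.documentIdRangeIds[resultId] || []).map(({ documentId, rangeId }) => ({
        path: chunk.documentPaths[documentId],
        rangeId,
    }))
}
```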
**`definitions` table**

This table is populated with the monikers of a range and that range's definition result. The table is indexed on the `(scheme, identifier)` pair to allow quick lookup by moniker.

| id  | scheme | identifier     | documentPath | range        |
| --- | ------ | -------------- | ------------ | ------------ |
| 1   | npm    | sample:foo:    | foo.ts       | 0:0 to 0:0   |
| 2   | npm    | sample:foo:foo | foo.ts       | 0:16 to 0:19 |
| 3   | npm    | sample:bar:    | bar.ts       | 0:0 to 0:0   |
| 4   | npm    | sample:bar:bar | bar.ts       | 2:16 to 2:19 |

The row with id `2` correlates the `npm` moniker for the `foo` function with the range where it is defined in `foo.ts`. Similarly, the row with id `4` correlates the exported `npm` moniker for the `bar` function with the range where it is defined in `bar.ts`.
**`references` table**

This table is populated with the monikers of a range and that range's reference result. The table is indexed on the `(scheme, identifier)` pair to allow quick lookup by moniker.

| id  | scheme | identifier     | documentPath | range        |
| --- | ------ | -------------- | ------------ | ------------ |
| 1   | npm    | sample:foo     | foo.ts       | 0:0 to 0:0   |
| 2   | npm    | sample:foo     | bar.ts       | 0:20 to 0:27 |
| 3   | npm    | sample:bar     | bar.ts       | 0:0 to 0:0   |
| 4   | npm    | sample:foo:foo | foo.ts       | 0:16 to 0:19 |
| 5   | npm    | sample:foo:foo | bar.ts       | 0:9 to 0:12  |
| 6   | npm    | sample:foo:foo | bar.ts       | 3:9 to 3:12  |
| 7   | npm    | sample:foo:foo | bar.ts       | 3:13 to 3:16 |
| 8   | npm    | sample:bar:bar | bar.ts       | 2:16 to 2:19 |

The rows with ids `4` through `7` correlate the `npm` moniker for the `foo` function with its references: the definition in `foo.ts`, its import in `bar.ts`, and its two uses in `bar.ts`, respectively.
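For illustration, the moniker lookup these two tables support is a single indexed query. The table and column names follow the rows above; the raw `sqlite3` access is a sketch, not the server's actual code (which goes through TypeORM entities not shown in this document):

```typescript
import { Database } from 'sqlite3'

// Find the definition locations recorded for a moniker, e.g. ('npm', 'sample:foo:foo').
// Column names follow the tables above; the raw-SQL access is illustrative only.
function definitionsForMoniker(
    db: Database,
    scheme: string,
    identifier: string,
    callback: (rows: { documentPath: string; range: string }[]) => void
): void {
    db.all(
        'SELECT documentPath, "range" FROM definitions WHERE scheme = ? AND identifier = ?',
        [scheme, identifier],
        (_err, rows) => callback(rows)
    )
}
```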
28
lsif/docs/xrepo.md
Normal file
@ -0,0 +1,28 @@
# Cross-repo data model

This document outlines the data model used to correlate multiple LSIF dumps. The definition of the cross-repo database tables can be found in `models.xrepo.ts`.

## Database values

**`packages` table**

This table links a package manager-specific identifier and version to the repository and commit that _provides_ the package. The scheme, name, and version values are correlated with a moniker and its package information from an LSIF dump.

| id  | scheme | name   | version | repository                    | commit                                   |
| --- | ------ | ------ | ------- | ----------------------------- | ---------------------------------------- |
| 1   | npm    | sample | 0.1.0   | github.com/sourcegraph/sample | e58d28c98a43f97112299ad6e590e5846b241763 |

This table enables cross-repository jump-to-definition. When a range has no definition result but does have an _import_ moniker, the scheme, name, and version of the moniker can be queried in this table to get the repository and commit of the package that should contain that moniker's definition.

**`references` table**

This table links a repository and commit to the set of packages on which it depends. This table shares common columns with the `packages` table, which are documented above. In addition, this table also has a `filter` column, which encodes a [bloom filter](https://en.wikipedia.org/wiki/Bloom_filter) populated with the set of identifiers that the commit imports from that package.

| id  | scheme | name      | version | repository                    | commit                                   | filter                       |
| --- | ------ | --------- | ------- | ----------------------------- | ---------------------------------------- | ---------------------------- |
| 1   | npm    | left-pad  | 0.1.0   | github.com/sourcegraph/sample | e58d28c98a43f97112299ad6e590e5846b241763 | _gzipped_ and _json-encoded_ |
| 2   | npm    | right-pad | 1.2.3   | github.com/sourcegraph/sample | e58d28c98a43f97112299ad6e590e5846b241763 | _gzipped_ and _json-encoded_ |
| 3   | npm    | left-pad  | 0.1.0   | github.com/sourcegraph/sample | 9f6e6ec73509159714606ec77e1c55be75235346 | _gzipped_ and _json-encoded_ |
| 4   | npm    | right-pad | 1.2.4   | github.com/sourcegraph/sample | 9f6e6ec73509159714606ec77e1c55be75235346 | _gzipped_ and _json-encoded_ |

This table enables global find-references. When finding all references of a definition that has an _export_ moniker, the set of repositories and commits that depend on the package of that moniker are queried. We want to open only the databases that import this particular symbol (not all projects depending on this package import the identifier under query). To do this, the bloom filter is deserialized and queried for the identifier. A positive response from a bloom filter indicates that the identifier may be present in the set; a negative response from the bloom filter indicates that the identifier is _definitely_ not in the set. We only open the set of databases for which the bloom filter query responds positively.
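A sketch of that membership check, using the `bloomfilter` package from the server's dependencies; the serialization of the `filter` column (here assumed to be the gzipped JSON of the filter's bucket array) and the number of hash functions are assumptions for illustration:

```typescript
import { BloomFilter } from 'bloomfilter'
import { gunzipSync } from 'zlib'

// Decide whether a dependent repository@commit might import the given identifier.
// A `false` answer is definitive; `true` may be a false positive. The gzipped-JSON
// bucket encoding and the hash-function count (16) are assumed for this sketch.
function mightImport(encodedFilter: Buffer, identifier: string): boolean {
    const buckets: number[] = JSON.parse(gunzipSync(encodedFilter).toString('utf8'))
    const filter = new BloomFilter(new Int32Array(buckets), 16)
    return filter.test(identifier)
}
```

Only the dumps whose filter answers `true` for the identifier need to be opened for the find-references query.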
@ -1 +0,0 @@
.cache/
3
lsif/extension/.gitignore
vendored
@ -1,3 +0,0 @@
dist/
node_modules/
.cache/
@ -1,3 +0,0 @@
# lsif (Sourcegraph extension)

This is a Sourcegraph extension which provides code intelligence using the Sourcegraph instance's LSIF API.
@ -1,58 +0,0 @@
|
||||
{
|
||||
"$schema": "https://raw.githubusercontent.com/sourcegraph/sourcegraph/master/shared/src/schema/extension.schema.json",
|
||||
"name": "lsif",
|
||||
"description": "LSIF",
|
||||
"publisher": "sourcegraph",
|
||||
"activationEvents": [
|
||||
"*"
|
||||
],
|
||||
"wip": true,
|
||||
"categories": [],
|
||||
"tags": [],
|
||||
"contributes": {
|
||||
"actions": [],
|
||||
"menus": {
|
||||
"editor/title": [],
|
||||
"commandPalette": []
|
||||
},
|
||||
"configuration": {}
|
||||
},
|
||||
"version": "0.0.0-DEVELOPMENT",
|
||||
"license": "MIT",
|
||||
"main": "dist/lsif.js",
|
||||
"scripts": {
|
||||
"tslint": "../../node_modules/.bin/tslint -p tsconfig.json './src/**/*.ts'",
|
||||
"eslint": "../../node_modules/.bin/eslint 'src/**/*.ts?(x)'",
|
||||
"typecheck": "tsc -p tsconfig.json",
|
||||
"build": "parcel build --out-file dist/lsif.js src/lsif.ts",
|
||||
"symlink-package": "mkdirp dist && lnfs ./package.json ./dist/package.json",
|
||||
"serve": "npm run symlink-package && parcel serve --no-hmr --out-file dist/lsif.js src/lsif.ts",
|
||||
"watch:typecheck": "tsc -p tsconfig.json -w",
|
||||
"watch:build": "tsc -p tsconfig.dist.json -w",
|
||||
"sourcegraph:prepublish": "npm run typecheck && npm run build"
|
||||
},
|
||||
"browserslist": [
|
||||
"last 1 Chrome versions",
|
||||
"last 1 Firefox versions",
|
||||
"last 1 Edge versions",
|
||||
"last 1 Safari versions"
|
||||
],
|
||||
"devDependencies": {
|
||||
"@sourcegraph/prettierrc": "^3.0.1",
|
||||
"@sourcegraph/tsconfig": "^4.0.0",
|
||||
"@sourcegraph/tslint-config": "^13.4.0",
|
||||
"lnfs-cli": "^2.1.0",
|
||||
"mkdirp": "^0.5.1",
|
||||
"parcel-bundler": "^1.12.3",
|
||||
"sourcegraph": "^23.0.1",
|
||||
"tslint": "^5.19.0",
|
||||
"typescript": "^3.6.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@sourcegraph/lsp-client": "^2.0.0-beta.2",
|
||||
"vscode-languageserver-types": "^3.14.0"
|
||||
},
|
||||
"resolutions": {
|
||||
"lodash": "4.17.13"
|
||||
}
|
||||
}
|
||||
@ -1,160 +0,0 @@
|
||||
import { convertHover, convertLocations } from '@sourcegraph/lsp-client/dist/lsp-conversion'
|
||||
import * as sourcegraph from 'sourcegraph'
|
||||
import * as LSP from 'vscode-languageserver-types'
|
||||
|
||||
function repositoryFromDoc(doc: sourcegraph.TextDocument): string {
|
||||
const url = new URL(doc.uri)
|
||||
return url.hostname + url.pathname
|
||||
}
|
||||
|
||||
function commitFromDoc(doc: sourcegraph.TextDocument): string {
|
||||
const url = new URL(doc.uri)
|
||||
return url.search.slice(1)
|
||||
}
|
||||
|
||||
function pathFromDoc(doc: sourcegraph.TextDocument): string {
|
||||
const url = new URL(doc.uri)
|
||||
return url.hash.slice(1)
|
||||
}
|
||||
|
||||
function setPath(doc: sourcegraph.TextDocument, path: string): string {
|
||||
const url = new URL(doc.uri)
|
||||
url.hash = path
|
||||
return url.href
|
||||
}
|
||||
|
||||
async function send({
|
||||
doc,
|
||||
method,
|
||||
path,
|
||||
position,
|
||||
}: {
|
||||
doc: sourcegraph.TextDocument
|
||||
method: string
|
||||
path: string
|
||||
position: LSP.Position
|
||||
}): Promise<any> {
|
||||
const url = new URL('.api/lsif/request', sourcegraph.internal.sourcegraphURL)
|
||||
url.searchParams.set('repository', repositoryFromDoc(doc))
|
||||
url.searchParams.set('commit', commitFromDoc(doc))
|
||||
|
||||
const response = await fetch(url.href, {
|
||||
method: 'POST',
|
||||
headers: new Headers({
|
||||
'content-type': 'application/json',
|
||||
'x-requested-with': 'Sourcegraph LSIF extension',
|
||||
}),
|
||||
body: JSON.stringify({
|
||||
method,
|
||||
path,
|
||||
position,
|
||||
}),
|
||||
})
|
||||
if (!response.ok) {
|
||||
throw new Error(`LSIF /request returned ${response.statusText}`)
|
||||
}
|
||||
return await response.json()
|
||||
}
|
||||
|
||||
const lsifDocs = new Map<string, Promise<boolean>>()
|
||||
|
||||
async function hasLSIF(doc: sourcegraph.TextDocument): Promise<boolean> {
|
||||
if (lsifDocs.has(doc.uri)) {
|
||||
return await lsifDocs.get(doc.uri)!
|
||||
}
|
||||
|
||||
const url = new URL('.api/lsif/exists', sourcegraph.internal.sourcegraphURL)
|
||||
url.searchParams.set('repository', repositoryFromDoc(doc))
|
||||
url.searchParams.set('commit', commitFromDoc(doc))
|
||||
url.searchParams.set('file', pathFromDoc(doc))
|
||||
|
||||
const hasLSIFPromise = (async () => {
|
||||
const response = await fetch(url.href, {
|
||||
method: 'POST',
|
||||
headers: new Headers({ 'x-requested-with': 'Sourcegraph LSIF extension' }),
|
||||
})
|
||||
if (!response.ok) {
|
||||
throw new Error(`LSIF /exists returned ${response.statusText}`)
|
||||
}
|
||||
return await response.json()
|
||||
})()
|
||||
|
||||
lsifDocs.set(doc.uri, hasLSIFPromise)
|
||||
|
||||
return await hasLSIFPromise
|
||||
}
|
||||
|
||||
export function activate(ctx: sourcegraph.ExtensionContext): void {
|
||||
ctx.subscriptions.add(
|
||||
sourcegraph.languages.registerHoverProvider(['*'], {
|
||||
provideHover: async (doc, position) => {
|
||||
if (!(await hasLSIF(doc))) {
|
||||
return null
|
||||
}
|
||||
const hover: LSP.Hover | null = await send({
|
||||
doc,
|
||||
method: 'hover',
|
||||
path: pathFromDoc(doc),
|
||||
position,
|
||||
})
|
||||
if (!hover) {
|
||||
return null
|
||||
}
|
||||
return convertHover(sourcegraph, hover)
|
||||
},
|
||||
})
|
||||
)
|
||||
|
||||
ctx.subscriptions.add(
|
||||
sourcegraph.languages.registerDefinitionProvider(['*'], {
|
||||
provideDefinition: async (doc, position) => {
|
||||
if (!(await hasLSIF(doc))) {
|
||||
return null
|
||||
}
|
||||
const body: LSP.Location | LSP.Location[] | null = await send({
|
||||
doc,
|
||||
method: 'definitions',
|
||||
path: pathFromDoc(doc),
|
||||
position,
|
||||
})
|
||||
if (!body) {
|
||||
return null
|
||||
}
|
||||
const locations = Array.isArray(body) ? body : [body]
|
||||
return convertLocations(
|
||||
sourcegraph,
|
||||
locations.map((definition: LSP.Location) => ({
|
||||
...definition,
|
||||
uri: setPath(doc, definition.uri),
|
||||
}))
|
||||
)
|
||||
},
|
||||
})
|
||||
)
|
||||
|
||||
ctx.subscriptions.add(
|
||||
sourcegraph.languages.registerReferenceProvider(['*'], {
|
||||
provideReferences: async (doc, position) => {
|
||||
if (!(await hasLSIF(doc))) {
|
||||
return null
|
||||
}
|
||||
const locations: LSP.Location[] | null = await send({
|
||||
doc,
|
||||
method: 'references',
|
||||
path: pathFromDoc(doc),
|
||||
position,
|
||||
})
|
||||
if (!locations) {
|
||||
return null
|
||||
}
|
||||
return convertLocations(
|
||||
sourcegraph,
|
||||
locations.map((reference: LSP.Location) => ({
|
||||
...reference,
|
||||
uri: setPath(doc, reference.uri),
|
||||
}))
|
||||
)
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
@ -1,16 +0,0 @@
{
  "extends": "@sourcegraph/tsconfig",
  "compilerOptions": {
    "target": "es2016",
    "module": "esnext",
    "moduleResolution": "node",
    "lib": ["esnext"],
    "skipLibCheck": true,
    "sourceMap": true,
    "declaration": true,
    "outDir": "dist",
    "rootDir": "src",
    "esModuleInterop": true,
    "allowSyntheticDefaultImports": true,
  },
}
@ -1,4 +0,0 @@
{
  "extends": ["../../tslint.config.js"],
  "linterOptions": { "exclude": ["node_modules/**"] }
}
File diff suppressed because it is too large
7
lsif/jest.config.js
Normal file
@ -0,0 +1,7 @@
// @ts-check

/** @type {jest.InitialOptions} */
const config = require('../jest.config.base')

/** @type {jest.InitialOptions} */
module.exports = { ...config, displayName: 'lsif', rootDir: __dirname }
@ -1,29 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
urlencode() {
|
||||
echo "$1" | curl -Gso /dev/null -w %{url_effective} --data-urlencode @- "" | cut -c 3- | sed -e 's/%0A//'
|
||||
}
|
||||
|
||||
file="$1"
|
||||
|
||||
usage() {
|
||||
echo "Sourcegraph LSIF uploader usage:"
|
||||
echo ""
|
||||
echo "env \\"
|
||||
echo " SRC_ENDPOINT=<https://sourcegraph.example.com> \\"
|
||||
echo " SRC_ACCESS_TOKEN=<secret> \\"
|
||||
echo " REPOSITORY=<github.com/you/your-repo> \\"
|
||||
echo " COMMIT=<40-char-hash> \\"
|
||||
echo " bash upload-lsif.sh <file.lsif>"
|
||||
}
|
||||
|
||||
if [[ -z "$SRC_ACCESS_TOKEN" || -z "$REPOSITORY" || -z "$COMMIT" || -z "$file" ]]; then
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
curl \
|
||||
-H "Authorization: token $SRC_ACCESS_TOKEN" \
|
||||
-H "Content-Type: application/x-ndjson+lsif" \
|
||||
"$SRC_ENDPOINT/.api/lsif/upload?repository=$(urlencode "$REPOSITORY")&commit=$(urlencode "$COMMIT")" \
|
||||
--data-binary "@$file"
|
||||
50
lsif/package.json
Normal file
@ -0,0 +1,50 @@
|
||||
{
|
||||
"name": "lsif-server",
|
||||
"description": "LSIF Server with an HTTP API",
|
||||
"author": "Sourcegraph",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0",
|
||||
"dependencies": {
|
||||
"async-middleware": "^1.2.1",
|
||||
"bloomfilter": "^0.0.18",
|
||||
"body-parser": "^1.19.0",
|
||||
"express": "^4.17.1",
|
||||
"lodash": "^4.17.15",
|
||||
"lsif-protocol": "0.4.3",
|
||||
"mz": "^2.7.0",
|
||||
"relateurl": "^0.2.7",
|
||||
"rmfr": "^2.0.0",
|
||||
"sqlite3": "^4.1.0",
|
||||
"typeorm": "^0.2.18",
|
||||
"vscode-languageserver": "^5.2.1",
|
||||
"yallist": "^3.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sourcegraph/tsconfig": "^4.0.0",
|
||||
"@types/bloomfilter": "^0.0.0",
|
||||
"@types/body-parser": "1.17.0",
|
||||
"@types/express": "4.17.0",
|
||||
"@types/jest": "24.0.17",
|
||||
"@types/lodash": "^4.14.138",
|
||||
"@types/mz": "0.0.32",
|
||||
"@types/relateurl": "^0.2.28",
|
||||
"@types/rmfr": "^2.0.0",
|
||||
"@types/sinon": "^7.0.13",
|
||||
"@types/yallist": "^3.0.1",
|
||||
"babel-jest": "^24.8.0",
|
||||
"jest": "^24.8.0",
|
||||
"sinon": "^7.4.1",
|
||||
"source-map-support": "^0.5.13",
|
||||
"tsc-watch": "^2.2.1",
|
||||
"tslint": "^5.18.0",
|
||||
"typescript": "^3.5.3"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "jest --testPathIgnorePatterns e2e",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"tslint": "../node_modules/.bin/tslint -p tsconfig.json",
|
||||
"eslint": "../node_modules/.bin/eslint 'src/**/*.ts?(x)'",
|
||||
"serve": "tsc-watch --onSuccess \"node --inspect -r source-map-support/register out/server.js\" --noClear"
|
||||
}
|
||||
}
|
||||
@ -1,7 +0,0 @@
out/
.cache/

# copied from lsif-node
src/database.ts
src/files.ts
src/json.ts
3
lsif/server/.gitignore
vendored
@ -1,3 +0,0 @@
.cache/
out/
node_modules/
@ -1,18 +0,0 @@
|
||||
FROM alpine:3.9@sha256:644fcb1a676b5165371437feaa922943aaf7afcfa8bfee4472f6860aad1ef2a0
|
||||
|
||||
ARG COMMIT_SHA="unknown"
|
||||
ARG DATE="unknown"
|
||||
ARG VERSION="unknown"
|
||||
|
||||
LABEL org.opencontainers.image.revision=${COMMIT_SHA}
|
||||
LABEL org.opencontainers.image.created=${DATE}
|
||||
LABEL org.opencontainers.image.version=${VERSION}
|
||||
LABEL com.sourcegraph.github.url=https://github.com/sourcegraph/sourcegraph/commit/${COMMIT_SHA}
|
||||
|
||||
# hadolint ignore=DL3018
|
||||
RUN apk add --no-cache nodejs=10.14.2-r0 tini
|
||||
|
||||
COPY http-server.bundle.js /lsif-server.js
|
||||
|
||||
EXPOSE 3186
|
||||
ENTRYPOINT ["/sbin/tini", "--", "node", "/lsif-server.js"]
|
||||
@ -1,19 +0,0 @@
|
||||
This license refers to the files in the src/ directory that have Microsoft attributions of authorship.
|
||||
|
||||
The MIT License (MIT)
|
||||
Copyright (c) Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
|
||||
associated documentation files (the "Software"), to deal in the Software without restriction,
|
||||
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
||||
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial
|
||||
portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
|
||||
NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
|
||||
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
|
||||
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
@ -1,48 +0,0 @@
|
||||
# LSIF HTTP server
|
||||
|
||||
This is an HTTP server on top of https://github.com/Microsoft/vscode-lsif-extension. Since there's currently no npm release of vscode-lsif-extension, the relevant files have been copied here with only trivial modifications to make it pass type checking and linting in this repository:
|
||||
|
||||
- [src/database.ts](src/database.ts)
|
||||
- [src/json.ts](src/json.ts)
|
||||
- [src/files.ts](src/files.ts)
|
||||
|
||||
The only new file is [src/http-server.ts](src/http-server.ts), which is a Node.js Express HTTP server with the following API:
|
||||
|
||||
## API
|
||||
|
||||
### `/upload`
|
||||
|
||||
Receives a file upload, and stores it on disk. Files that are too big are rejected. If the max disk usage has been reached, old files (based on upload time) get deleted to free up space.
|
||||
|
||||
URL query parameters:
|
||||
|
||||
- `repository`: the name of the repository (e.g. `github.com/sourcegraph/codeintellify`)
|
||||
- `commit`: the 40 character hash of the commit
|
||||
|
||||
The request body must be HTML form data with a single file (e.g. `curl -F "data=@file.lsif" ...`).
|
||||
|
||||
### `/request`
|
||||
|
||||
Performs a `hover`, `definitions`, or `references` request on the LSIF file for the given repository@commit and returns the result. Fails if there is no LSIF data for the given repository@commit. Internally, it maintains an LRU cache of open `Database`s for speed and evicts old ones to avoid running out of memory.
|
||||
|
||||
The request body must be a JSON object with these properties:
|
||||
|
||||
- `repository`: the name of the repository (e.g. `github.com/sourcegraph/codeintellify`)
|
||||
- `commit`: the 40 character hash of the commit
|
||||
- `method`: `hover`, `definitions`, or `references`
|
||||
- `path`: the file path in the repository. This deviates from `uri` in the LSIF specification because Sourcegraph currently only supports same-repository code intelligence.
|
||||
- `position`: the `{ line, character }` in the file at which the request is being made
|
||||
|
||||
## LSIF file storage details
|
||||
|
||||
LSIF files are stored on disk with the following naming convention:
|
||||
|
||||
```
|
||||
$URL_ENCODED_REPOSITORY@$40_CHAR_HASH.lsif
|
||||
```
|
||||
|
||||
For example, for `github.com/sourcegraph/codeintellify` at commit `c21c0da7b2a6cacafcbf90c85a81bf432020ad9b`:
|
||||
|
||||
```
|
||||
github.com%2Fsourcegraph%2Fcodeintellify@c21c0da7b2a6cacafcbf90c85a81bf432020ad9b.lsif
|
||||
```
|
||||
@ -1,12 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")/../.."
|
||||
set -ex
|
||||
|
||||
yarn --cwd lsif/server
|
||||
yarn --cwd lsif/server run build
|
||||
|
||||
docker build -f lsif/server/Dockerfile -t "$IMAGE" lsif/server/out \
|
||||
--build-arg COMMIT_SHA \
|
||||
--build-arg DATE \
|
||||
--build-arg VERSION
|
||||
@ -1,43 +0,0 @@
|
||||
{
|
||||
"name": "lsif-server",
|
||||
"description": "LSIF Server with an HTTP API",
|
||||
"author": "Microsoft Corporation and Sourcegraph",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0",
|
||||
"engines": {
|
||||
"node": ">=10.0.0"
|
||||
},
|
||||
"dependencies": {
|
||||
"async-middleware": "^1.2.1",
|
||||
"body-parser": "^1.19.0",
|
||||
"express": "^4.17.1",
|
||||
"express-async-handler": "^1.1.4",
|
||||
"lru-cache": "^5.1.1",
|
||||
"lsif-protocol": "0.4.0",
|
||||
"move-file": "^1.2.0",
|
||||
"mz": "^2.7.0",
|
||||
"semver": "^6.0.0",
|
||||
"tmp-promise": "^2.0.2",
|
||||
"vscode-languageserver": "^5.2.1",
|
||||
"vscode-uri": "^2.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/body-parser": "1.17.1",
|
||||
"@types/express": "4.17.1",
|
||||
"@types/fs-extra": "8.0.0",
|
||||
"@types/lru-cache": "5.1.0",
|
||||
"@types/mz": "0.0.32",
|
||||
"@types/semver": "6.0.1",
|
||||
"parcel-bundler": "^1.12.3",
|
||||
"tsc-watch": "^2.2.1",
|
||||
"tslint": "^5.19.0",
|
||||
"typescript": "^3.6.3"
|
||||
},
|
||||
"scripts": {
|
||||
"typecheck": "tsc -p .",
|
||||
"serve": "tsc-watch --onSuccess \"node out/http-server.js\" --noClear",
|
||||
"build": "parcel build --out-dir out --out-file http-server.bundle.js --target node --bundle-node-modules src/http-server.ts",
|
||||
"tslint": "../../node_modules/.bin/tslint -p tsconfig.json",
|
||||
"eslint": "../../node_modules/.bin/eslint 'src/**/*.ts?(x)'"
|
||||
}
|
||||
}
|
||||
@ -1,122 +0,0 @@
|
||||
/* --------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
* ------------------------------------------------------------------------------------------ */
|
||||
import { URI } from 'vscode-uri'
|
||||
import * as lsp from 'vscode-languageserver'
|
||||
import { Range, Id } from 'lsif-protocol'
|
||||
|
||||
import { FileType, FileSystem, DocumentInfo, FileStat } from './files'
|
||||
|
||||
export interface UriTransformer {
|
||||
toDatabase(uri: string): string
|
||||
fromDatabase(uri: string): string
|
||||
}
|
||||
|
||||
export const noopTransformer: UriTransformer = {
|
||||
toDatabase: uri => uri,
|
||||
fromDatabase: uri => uri,
|
||||
}
|
||||
|
||||
export abstract class Database {
|
||||
private fileSystem!: FileSystem
|
||||
private uriTransformer!: UriTransformer
|
||||
|
||||
protected constructor() {}
|
||||
|
||||
protected initialize(transformerFactory: (projectRoot: string) => UriTransformer): void {
|
||||
const projectRoot = this.getProjectRoot().toString(true)
|
||||
this.uriTransformer = transformerFactory ? transformerFactory(projectRoot) : noopTransformer
|
||||
this.fileSystem = new FileSystem(projectRoot, this.getDocumentInfos())
|
||||
}
|
||||
|
||||
public abstract load(file: string, transformerFactory: (projectRoot: string) => UriTransformer): Promise<void>
|
||||
|
||||
public abstract close(): void
|
||||
|
||||
public abstract getProjectRoot(): URI
|
||||
|
||||
protected abstract getDocumentInfos(): DocumentInfo[]
|
||||
|
||||
public stat(uri: string): FileStat | null {
|
||||
let transformed = this.uriTransformer.toDatabase(uri)
|
||||
let result = this.fileSystem.stat(transformed)
|
||||
if (result !== null) {
|
||||
return result
|
||||
}
|
||||
let id = this.findFile(transformed)
|
||||
if (id === undefined) {
|
||||
return null
|
||||
}
|
||||
return FileStat.createFile()
|
||||
}
|
||||
|
||||
public readDirectory(uri: string): [string, FileType][] {
|
||||
return this.fileSystem.readDirectory(this.uriTransformer.toDatabase(uri))
|
||||
}
|
||||
|
||||
public readFileContent(uri: string): string | null {
|
||||
let transformed = this.uriTransformer.toDatabase(uri)
|
||||
let id = this.fileSystem.getFileId(transformed)
|
||||
if (id === undefined) {
|
||||
id = this.findFile(transformed)
|
||||
}
|
||||
if (id === undefined) {
|
||||
return null
|
||||
}
|
||||
let result = this.fileContent(id)
|
||||
if (result === undefined) {
|
||||
return null
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
protected abstract findFile(uri: string): Id | undefined
|
||||
|
||||
protected abstract fileContent(id: Id): string | undefined
|
||||
|
||||
public abstract foldingRanges(uri: string): lsp.FoldingRange[] | undefined
|
||||
|
||||
public abstract documentSymbols(uri: string): lsp.DocumentSymbol[] | undefined
|
||||
|
||||
public abstract hover(uri: string, position: lsp.Position): lsp.Hover | undefined
|
||||
|
||||
public abstract declarations(uri: string, position: lsp.Position): lsp.Location | lsp.Location[] | undefined
|
||||
|
||||
public abstract definitions(uri: string, position: lsp.Position): lsp.Location | lsp.Location[] | undefined
|
||||
|
||||
public abstract references(
|
||||
uri: string,
|
||||
position: lsp.Position,
|
||||
context: lsp.ReferenceContext
|
||||
): lsp.Location[] | undefined
|
||||
|
||||
protected asDocumentSymbol(range: Range): lsp.DocumentSymbol | undefined {
|
||||
let tag = range.tag
|
||||
if (tag === undefined || !(tag.type === 'declaration' || tag.type === 'definition')) {
|
||||
return undefined
|
||||
}
|
||||
return lsp.DocumentSymbol.create(tag.text, tag.detail || '', tag.kind, tag.fullRange, this.asRange(range))
|
||||
}
|
||||
|
||||
protected asRange(value: Range): lsp.Range {
|
||||
return {
|
||||
start: {
|
||||
line: value.start.line,
|
||||
character: value.start.character,
|
||||
},
|
||||
end: {
|
||||
line: value.end.line,
|
||||
character: value.end.character,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
protected toDatabase(uri: string): string {
|
||||
return this.uriTransformer.toDatabase(uri)
|
||||
}
|
||||
|
||||
protected fromDatabase(uri: string): string {
|
||||
return this.uriTransformer.fromDatabase(uri)
|
||||
}
|
||||
}
|
||||
@ -1,159 +0,0 @@
|
||||
/* --------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
* ------------------------------------------------------------------------------------------ */
|
||||
import * as path from 'path'
|
||||
|
||||
import { Id } from 'lsif-protocol'
|
||||
|
||||
const ctime = Date.now()
|
||||
const mtime = Date.now()
|
||||
|
||||
export namespace FileType {
|
||||
export const Unknown: 0 = 0
|
||||
export const File: 1 = 1
|
||||
export const Directory: 2 = 2
|
||||
export const SymbolicLink: 64 = 64
|
||||
}
|
||||
|
||||
export type FileType = 0 | 1 | 2 | 64
|
||||
|
||||
export interface FileStat {
|
||||
type: FileType
|
||||
ctime: number
|
||||
mtime: number
|
||||
size: number
|
||||
}
|
||||
|
||||
export namespace FileStat {
|
||||
export function createFile(): FileStat {
|
||||
return { type: FileType.File, ctime: ctime, mtime: mtime, size: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
export interface DocumentInfo {
|
||||
id: Id
|
||||
uri: string
|
||||
}
|
||||
|
||||
interface File extends FileStat {
|
||||
type: 1
|
||||
name: string
|
||||
id: Id
|
||||
}
|
||||
|
||||
namespace File {
|
||||
export function create(name: string, id: Id): File {
|
||||
return { type: FileType.File, ctime: ctime, mtime: mtime, size: 0, name, id }
|
||||
}
|
||||
}
|
||||
|
||||
interface Directory extends FileStat {
|
||||
type: 2
|
||||
name: string
|
||||
children: Map<string, Entry>
|
||||
}
|
||||
|
||||
namespace Directory {
|
||||
export function create(name: string): Directory {
|
||||
return { type: FileType.Directory, ctime: Date.now(), mtime: Date.now(), size: 0, name, children: new Map() }
|
||||
}
|
||||
}
|
||||
|
||||
export type Entry = File | Directory
|
||||
|
||||
export class FileSystem {
|
||||
private projectRoot: string
|
||||
private projectRootWithSlash: string
|
||||
private outside: Map<string, Id>
|
||||
private root: Directory
|
||||
|
||||
constructor(projectRoot: string, documents: DocumentInfo[]) {
|
||||
if (projectRoot.charAt(projectRoot.length - 1) === '/') {
|
||||
this.projectRoot = projectRoot.substr(0, projectRoot.length - 1)
|
||||
this.projectRootWithSlash = projectRoot
|
||||
} else {
|
||||
this.projectRoot = projectRoot
|
||||
this.projectRootWithSlash = projectRoot + '/'
|
||||
}
|
||||
this.root = Directory.create('')
|
||||
this.outside = new Map()
|
||||
for (let info of documents) {
|
||||
// Do not show file outside the projectRoot.
|
||||
if (!info.uri.startsWith(this.projectRootWithSlash)) {
|
||||
this.outside.set(info.uri, info.id)
|
||||
continue
|
||||
}
|
||||
let p = info.uri.substring(projectRoot.length)
|
||||
let dirname = path.posix.dirname(p)
|
||||
let basename = path.posix.basename(p)
|
||||
let entry = this.lookup(dirname, true)
|
||||
if (entry && entry.type === FileType.Directory) {
|
||||
entry.children.set(basename, File.create(basename, info.id))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public stat(uri: string): FileStat | null {
|
||||
let isRoot = this.projectRoot === uri
|
||||
if (!uri.startsWith(this.projectRootWithSlash) && !isRoot) {
|
||||
return null
|
||||
}
|
||||
let p = isRoot ? '' : uri.substring(this.projectRootWithSlash.length)
|
||||
let entry = this.lookup(p, false)
|
||||
return entry ? entry : null
|
||||
}
|
||||
|
||||
public readDirectory(uri: string): [string, FileType][] {
|
||||
let isRoot = this.projectRoot === uri
|
||||
if (!uri.startsWith(this.projectRootWithSlash) && !isRoot) {
|
||||
return []
|
||||
}
|
||||
let p = isRoot ? '' : uri.substring(this.projectRootWithSlash.length)
|
||||
let entry = this.lookup(p, false)
|
||||
if (entry === undefined || entry.type !== FileType.Directory) {
|
||||
return []
|
||||
}
|
||||
let result: [string, FileType][] = []
|
||||
for (let child of entry.children.values()) {
|
||||
result.push([child.name, child.type])
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
public getFileId(uri: string): Id | undefined {
|
||||
let isRoot = this.projectRoot === uri
|
||||
let result = this.outside.get(uri)
|
||||
if (result !== undefined) {
|
||||
return result
|
||||
}
|
||||
if (!uri.startsWith(this.projectRootWithSlash) && !isRoot) {
|
||||
return undefined
|
||||
}
|
||||
let entry = this.lookup(isRoot ? '' : uri.substring(this.projectRootWithSlash.length))
|
||||
return entry && entry.type === FileType.File ? entry.id : undefined
|
||||
}
|
||||
|
||||
private lookup(uri: string, create: boolean = false): Entry | undefined {
|
||||
let parts = uri.split('/')
|
||||
let entry: Entry = this.root
|
||||
for (const part of parts) {
|
||||
if (!part || part === '.') {
|
||||
continue
|
||||
}
|
||||
let child: Entry | undefined
|
||||
if (entry.type === FileType.Directory) {
|
||||
child = entry.children.get(part)
|
||||
if (child === undefined && create) {
|
||||
child = Directory.create(part)
|
||||
entry.children.set(part, child)
|
||||
}
|
||||
}
|
||||
if (!child) {
|
||||
return undefined
|
||||
}
|
||||
entry = child
|
||||
}
|
||||
return entry
|
||||
}
|
||||
}
|
||||
@ -1,399 +0,0 @@
|
||||
import { wrap } from 'async-middleware'
|
||||
import bodyParser from 'body-parser'
|
||||
import express from 'express'
|
||||
import LRU from 'lru-cache'
|
||||
import moveFile from 'move-file'
|
||||
import { fs } from 'mz'
|
||||
import * as path from 'path'
|
||||
import { withFile } from 'tmp-promise'
|
||||
import { Database, noopTransformer } from './database'
|
||||
import { JsonDatabase } from './json'
|
||||
|
||||
/**
|
||||
* Reads an integer from an environment variable or defaults to the given value.
|
||||
*/
|
||||
function readEnvInt({ key, defaultValue }: { key: string; defaultValue: number }): number {
|
||||
const value = process.env[key]
|
||||
if (!value) {
|
||||
return defaultValue
|
||||
}
|
||||
const n = parseInt(value, 10)
|
||||
if (isNaN(n)) {
|
||||
return defaultValue
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
/**
|
||||
* Where on the file system to store LSIF files.
|
||||
*/
|
||||
const STORAGE_ROOT = process.env.LSIF_STORAGE_ROOT || 'lsif-storage'
|
||||
|
||||
/**
|
||||
* Soft limit on the amount of storage used by LSIF files. Storage can exceed
|
||||
* this limit if a single LSIF file is larger than this, otherwise storage will
|
||||
* be kept under this limit. Defaults to 100GB.
|
||||
*/
|
||||
const SOFT_MAX_STORAGE = readEnvInt({ key: 'LSIF_SOFT_MAX_STORAGE', defaultValue: 100 * 1024 * 1024 * 1024 })
|
||||
|
||||
/**
|
||||
* Limit on the file size accepted by the /upload endpoint. Defaults to 100MB.
|
||||
*/
|
||||
const MAX_FILE_SIZE = readEnvInt({ key: 'LSIF_MAX_FILE_SIZE', defaultValue: 100 * 1024 * 1024 })
|
||||
|
||||
/**
|
||||
* Soft limit on the total amount of storage occupied by LSIF data loaded in
|
||||
* memory. The actual amount can exceed this if a single LSIF file is larger
|
||||
* than this limit, otherwise memory will be kept under this limit. Defaults to
|
||||
* 100MB.
|
||||
*
|
||||
* Empirically based on github.com/sourcegraph/codeintellify, each byte of
|
||||
* storage (uncompressed newline-delimited JSON) expands to 3 bytes in memory.
|
||||
*/
|
||||
const SOFT_MAX_STORAGE_IN_MEMORY = readEnvInt({
|
||||
key: 'LSIF_SOFT_MAX_STORAGE_IN_MEMORY',
|
||||
defaultValue: 100 * 1024 * 1024,
|
||||
})
|
||||
|
||||
/**
|
||||
* Which port to run the LSIF server on. Defaults to 3186.
|
||||
*/
|
||||
const PORT = readEnvInt({ key: 'LSIF_HTTP_PORT', defaultValue: 3186 })
|
||||
|
||||
/**
|
||||
* An opaque repository ID.
|
||||
*/
|
||||
interface Repository {
|
||||
repository: string
|
||||
}
|
||||
|
||||
/**
|
||||
* A 40-character commit hash.
|
||||
*/
|
||||
interface Commit {
|
||||
commit: string
|
||||
}
|
||||
|
||||
/**
|
||||
* Combines `Repository` and `Commit`.
|
||||
*/
|
||||
interface RepositoryCommit extends Repository, Commit {}
|
||||
|
||||
/**
|
||||
* Deletes old files (sorted by last modified time) to keep the disk usage below
|
||||
* the given `max`.
|
||||
*/
|
||||
async function enforceMaxDiskUsage({
|
||||
flatDirectory,
|
||||
max,
|
||||
onBeforeDelete,
|
||||
}: {
|
||||
flatDirectory: string
|
||||
max: number
|
||||
onBeforeDelete: (filePath: string) => void
|
||||
}): Promise<void> {
|
||||
if (!(await fs.exists(flatDirectory))) {
|
||||
return
|
||||
}
|
||||
const files = await Promise.all(
|
||||
(await fs.readdir(flatDirectory)).map(async f => ({
|
||||
path: path.join(flatDirectory, f),
|
||||
stat: await fs.stat(path.join(flatDirectory, f)),
|
||||
}))
|
||||
)
|
||||
let totalSize = files.reduce((subtotal, f) => subtotal + f.stat.size, 0)
|
||||
for (const f of files.sort((a, b) => a.stat.atimeMs - b.stat.atimeMs)) {
|
||||
if (totalSize <= max) {
|
||||
break
|
||||
}
|
||||
onBeforeDelete(f.path)
|
||||
await fs.unlink(f.path)
|
||||
totalSize = totalSize - f.stat.size
|
||||
}
|
||||
}
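// Illustrative sketch (not part of the original file): how this helper might be
// invoked periodically to keep STORAGE_ROOT under the soft limit. The one-minute
// interval is an assumed value for demonstration only.
function exampleScheduleCleanup(): void {
    setInterval(() => {
        enforceMaxDiskUsage({
            flatDirectory: STORAGE_ROOT,
            max: SOFT_MAX_STORAGE,
            onBeforeDelete: filePath => console.log(`Deleting ${filePath} to reclaim disk space.`),
        }).catch(e => console.error(e))
    }, 60 * 1000)
}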
|
||||
|
||||
/**
|
||||
* Computes the filename that contains LSIF data for the given repository@commit.
|
||||
*/
|
||||
function diskKey({ repository, commit }: RepositoryCommit): string {
|
||||
const urlEncodedRepository = encodeURIComponent(repository)
|
||||
return path.join(STORAGE_ROOT, `${urlEncodedRepository}@${commit}.lsif`)
|
||||
}
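// For example (illustrative, not part of the original file): with the default STORAGE_ROOT,
// diskKey({ repository: 'github.com/user/repo', commit: '<40-char hash>' }) returns
// 'lsif-storage/github.com%2Fuser%2Frepo@<40-char hash>.lsif'.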
|
||||
|
||||
/**
|
||||
* Loads LSIF data from disk and returns a promise to the resulting `Database`.
|
||||
* Throws ENOENT when there is no LSIF data for the given repository@commit.
|
||||
*/
|
||||
async function createDB(repositoryCommit: RepositoryCommit): Promise<Database> {
|
||||
const db = new JsonDatabase()
|
||||
await db.load(diskKey(repositoryCommit), projectRootURI => ({
|
||||
toDatabase: pathRelativeToProjectRoot => projectRootURI + '/' + pathRelativeToProjectRoot,
|
||||
fromDatabase: uri => (uri.startsWith(projectRootURI) ? uri.slice(`${projectRootURI}/`.length) : uri),
|
||||
}))
|
||||
return db
|
||||
}
|
||||
|
||||
/**
|
||||
* List of supported `Database` methods.
|
||||
*/
|
||||
type SupportedMethods = 'hover' | 'definitions' | 'references'
|
||||
|
||||
const SUPPORTED_METHODS: Set<SupportedMethods> = new Set(['hover', 'definitions', 'references'])
|
||||
|
||||
/**
|
||||
* Type guard for SupportedMethods.
|
||||
*/
|
||||
function isSupportedMethod(method: string): method is SupportedMethods {
|
||||
return (SUPPORTED_METHODS as Set<string>).has(method)
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws an error with status 400 if the repository is invalid.
|
||||
*/
|
||||
function checkRepository(repository: any): void {
|
||||
if (typeof repository !== 'string') {
|
||||
throw Object.assign(new Error('Must specify the repository (usually of the form github.com/user/repo)'), {
|
||||
status: 400,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws an error with status 400 if the commit is invalid.
|
||||
*/
|
||||
function checkCommit(commit: any): void {
|
||||
if (typeof commit !== 'string' || commit.length !== 40 || !/^[0-9a-f]+$/.test(commit)) {
|
||||
throw Object.assign(new Error('Must specify the commit as a 40 character hash ' + commit), { status: 400 })
|
||||
}
|
||||
}
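// Illustrative note (not part of the original file): only full 40-character lowercase
// hex hashes are accepted here; 'a'.repeat(40) passes, while 'HEAD', branch names,
// or abbreviated 7-character hashes all produce a 400 response.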
|
||||
|
||||
/**
|
||||
* A `Database`, the size of the LSIF file it was loaded from, and a callback to
|
||||
* dispose of it when evicted from the cache.
|
||||
*/
|
||||
interface LRUDBEntry {
|
||||
dbPromise: Promise<Database>
|
||||
/**
|
||||
* The size of the underlying LSIF file. This directly contributes to the
|
||||
* size of the cache. Ideally, this would be set to the amount of memory
|
||||
* that the `Database` uses, but calculating the memory usage is difficult
|
||||
* so this uses the file size as a rough heuristic.
|
||||
*/
|
||||
length: number
|
||||
dispose: () => void
|
||||
}
|
||||
|
||||
/**
|
||||
* An LRU cache mapping `repository@commit`s to in-memory `Database`s. Old
|
||||
* `Database`s are evicted from the cache to prevent OOM errors.
|
||||
*/
|
||||
const dbLRU = new LRU<string, LRUDBEntry>({
|
||||
max: SOFT_MAX_STORAGE_IN_MEMORY,
|
||||
// `length` contributes to the total size of the cache, with a `max` specified
|
||||
// above, after which old items get evicted.
|
||||
length: (entry, key) => entry.length,
|
||||
dispose: (key, entry) => entry.dispose(),
|
||||
})
|
||||
|
||||
/**
|
||||
* Runs the given `action` with the `Database` associated with the given
|
||||
* repository@commit. Internally, it either gets the `Database` from the LRU
|
||||
* cache or loads it from storage.
|
||||
*/
|
||||
async function withDB(
|
||||
repositoryCommit: RepositoryCommit,
|
||||
action: (db: Database) => Promise<void> | void
|
||||
): Promise<void> {
|
||||
const entry = dbLRU.get(diskKey(repositoryCommit))
|
||||
if (entry) {
|
||||
await action(await entry.dbPromise)
|
||||
} else {
|
||||
const length = (await fs.stat(diskKey(repositoryCommit))).size
|
||||
const dbPromise = createDB(repositoryCommit)
|
||||
dbLRU.set(diskKey(repositoryCommit), {
|
||||
dbPromise,
|
||||
length,
|
||||
dispose: () => dbPromise.then(db => db.close()),
|
||||
})
|
||||
await action(await dbPromise)
|
||||
}
|
||||
}
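// Illustrative usage sketch (not part of the original file): run a hover query against
// the cached database for a repository and commit. The repository, path, and position
// values below are hypothetical.
async function exampleWithDB(): Promise<void> {
    await withDB({ repository: 'github.com/user/repo', commit: 'a'.repeat(40) }, db => {
        console.log(db.hover('src/index.ts', { line: 10, character: 5 }))
    })
}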
|
||||
|
||||
/**
|
||||
* Runs the HTTP server which accepts LSIF file uploads and responds to
|
||||
* hover/defs/refs requests.
|
||||
*/
|
||||
function main(): void {
|
||||
const app = express()
|
||||
|
||||
app.use((err: any, req: express.Request, res: express.Response, next: express.NextFunction) => {
|
||||
if (err && err.status) {
|
||||
res.status(err.status).send({ message: err.message })
|
||||
return
|
||||
}
|
||||
res.status(500).send({ message: 'Unknown error' })
|
||||
console.error(err)
|
||||
})
|
||||
|
||||
app.get('/ping', (req, res) => {
|
||||
res.send({ pong: 'pong' })
|
||||
})
|
||||
|
||||
app.post(
|
||||
'/request',
|
||||
bodyParser.json({ limit: '1mb' }),
|
||||
wrap(async (req, res) => {
|
||||
const { repository, commit } = req.query
|
||||
const { path, position, method } = req.body
|
||||
|
||||
checkRepository(repository)
|
||||
checkCommit(commit)
|
||||
if (!isSupportedMethod(method)) {
|
||||
throw Object.assign(new Error('Method must be one of ' + SUPPORTED_METHODS), { status: 422 })
|
||||
}
|
||||
|
||||
try {
|
||||
await withDB({ repository, commit }, db => {
|
||||
let result: any
|
||||
switch (method) {
|
||||
case 'hover':
|
||||
result = db.hover(path, position)
|
||||
break
|
||||
case 'definitions':
|
||||
result = db.definitions(path, position)
|
||||
break
|
||||
case 'references':
|
||||
result = db.references(path, position, { includeDeclaration: false })
|
||||
break
|
||||
default:
|
||||
throw new Error(`Unknown method ${method}`)
|
||||
}
|
||||
res.json(result || null)
|
||||
})
|
||||
} catch (e) {
|
||||
if ('code' in e && e.code === 'ENOENT') {
|
||||
throw Object.assign(new Error(`No LSIF data available for ${repository}@${commit}.`), {
|
||||
status: 404,
|
||||
})
|
||||
}
|
||||
throw e
|
||||
}
|
||||
})
|
||||
)
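// Illustrative client sketch (not part of the original file), assuming some fetch
// implementation (for example the node-fetch package) is passed in. It posts a
// definitions request for a hypothetical repository, commit, file, and position.
async function exampleRequestClient(fetch: (url: string, init?: any) => Promise<any>): Promise<void> {
    const query = `repository=${encodeURIComponent('github.com/user/repo')}&commit=${'a'.repeat(40)}`
    const response = await fetch(`http://localhost:3186/request?${query}`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            path: 'src/index.ts',
            position: { line: 10, character: 5 },
            method: 'definitions',
        }),
    })
    console.log(await response.json())
}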
|
||||
|
||||
app.post(
|
||||
'/exists',
|
||||
wrap(async (req, res) => {
|
||||
const { repository, commit, file } = req.query
|
||||
|
||||
checkRepository(repository)
|
||||
checkCommit(commit)
|
||||
|
||||
if (!(await fs.exists(diskKey({ repository, commit })))) {
|
||||
res.send(false)
|
||||
return
|
||||
}
|
||||
|
||||
if (!file) {
|
||||
res.send(await fs.exists(diskKey({ repository, commit })))
|
||||
return
|
||||
}
|
||||
|
||||
if (typeof file !== 'string') {
|
||||
throw Object.assign(new Error('File must be a string'), { status: 400 })
|
||||
}
|
||||
|
||||
try {
|
||||
res.send(Boolean((await createDB({ repository, commit })).stat(file)))
|
||||
} catch (e) {
|
||||
if ('code' in e && e.code === 'ENOENT') {
|
||||
res.send(false)
|
||||
return
|
||||
}
|
||||
throw e
|
||||
}
|
||||
})
|
||||
)
|
||||
|
||||
app.post(
|
||||
'/upload',
|
||||
wrap(async (req, res) => {
|
||||
const { repository, commit } = req.query
|
||||
|
||||
checkRepository(repository)
|
||||
checkCommit(commit)
|
||||
|
||||
if (req.header('Content-Length') && parseInt(req.header('Content-Length') || '', 10) > MAX_FILE_SIZE) {
|
||||
throw Object.assign(
|
||||
new Error(
|
||||
`The size of the given LSIF file (${req.header(
|
||||
'Content-Length'
|
||||
)} bytes) exceeds the max of ${MAX_FILE_SIZE}`
|
||||
),
|
||||
{ status: 413 }
|
||||
)
|
||||
}
|
||||
|
||||
let contentLength = 0
|
||||
|
||||
await withFile(async tempFile => {
|
||||
// Pipe the given LSIF data to a temp file.
|
||||
await new Promise((resolve, reject) => {
|
||||
const tempFileWriteStream = fs.createWriteStream(tempFile.path)
|
||||
req.on('data', chunk => {
|
||||
contentLength += chunk.length
|
||||
if (contentLength > MAX_FILE_SIZE) {
|
||||
tempFileWriteStream.destroy()
|
||||
reject(
|
||||
Object.assign(
|
||||
new Error(
|
||||
`The size of the given LSIF file (${contentLength} bytes so far) exceeds the max of ${MAX_FILE_SIZE}`
|
||||
),
|
||||
{ status: 413 }
|
||||
)
|
||||
)
|
||||
}
|
||||
}).pipe(tempFileWriteStream)
|
||||
|
||||
tempFileWriteStream.on('close', resolve)
|
||||
tempFileWriteStream.on('error', reject)
|
||||
})
|
||||
|
||||
// Load a `Database` from the file to check that it's valid.
|
||||
await new JsonDatabase().load(tempFile.path, () => noopTransformer)
|
||||
|
||||
// Replace the old LSIF file with the new file.
|
||||
try {
|
||||
await fs.mkdir(STORAGE_ROOT)
|
||||
} catch (e) {
|
||||
if (e.code !== 'EEXIST') {
|
||||
throw e
|
||||
}
|
||||
}
|
||||
await moveFile(tempFile.path, diskKey({ repository, commit }))
|
||||
|
||||
// Evict the old `Database` from the LRU cache to cause it to pick up the new LSIF data from disk.
|
||||
dbLRU.del(diskKey({ repository, commit }))
|
||||
|
||||
res.send('Upload successful.')
|
||||
})
|
||||
|
||||
// TODO enforce max disk usage per-repository. Currently, a
|
||||
// misbehaving client could upload a bunch of LSIF files for one
|
||||
// repository and take up all of the disk space, causing all other
|
||||
// LSIF files to get deleted to make room for the new files.
|
||||
await enforceMaxDiskUsage({
|
||||
flatDirectory: STORAGE_ROOT,
|
||||
max: Math.max(0, SOFT_MAX_STORAGE - contentLength),
|
||||
onBeforeDelete: filePath =>
|
||||
console.log(`Deleting ${filePath} to help keep disk usage under ${SOFT_MAX_STORAGE}.`),
|
||||
})
|
||||
})
|
||||
)
|
||||
|
||||
app.listen(PORT, () => {
|
||||
console.log(`Listening for HTTP requests on port ${PORT}`)
|
||||
})
|
||||
}
|
||||
|
||||
main()
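// Illustrative client sketch (not part of the original file): uploading an LSIF dump to
// the /upload endpoint. The fetch parameter and dump filename are hypothetical; any HTTP
// client that can stream a request body would work equally well.
async function exampleUploadClient(fetch: (url: string, init?: any) => Promise<any>): Promise<void> {
    const query = `repository=${encodeURIComponent('github.com/user/repo')}&commit=${'a'.repeat(40)}`
    const response = await fetch(`http://localhost:3186/upload?${query}`, {
        method: 'POST',
        body: fs.createReadStream('dump.lsif'),
    })
    console.log(await response.text())
}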
|
||||
@ -1,588 +0,0 @@
|
||||
/* --------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
* ------------------------------------------------------------------------------------------ */
|
||||
import * as fs from 'fs'
|
||||
import * as readline from 'readline'
|
||||
|
||||
import { URI } from 'vscode-uri'
|
||||
import * as SemVer from 'semver'
|
||||
|
||||
import * as lsp from 'vscode-languageserver'
|
||||
import {
|
||||
Id,
|
||||
Vertex,
|
||||
Project,
|
||||
Document,
|
||||
Range,
|
||||
DiagnosticResult,
|
||||
DocumentSymbolResult,
|
||||
FoldingRangeResult,
|
||||
DocumentLinkResult,
|
||||
DefinitionResult,
|
||||
TypeDefinitionResult,
|
||||
HoverResult,
|
||||
ReferenceResult,
|
||||
ImplementationResult,
|
||||
Edge,
|
||||
RangeBasedDocumentSymbol,
|
||||
DeclarationResult,
|
||||
ResultSet,
|
||||
ElementTypes,
|
||||
VertexLabels,
|
||||
EdgeLabels,
|
||||
ItemEdgeProperties,
|
||||
} from 'lsif-protocol'
|
||||
|
||||
import { DocumentInfo } from './files'
|
||||
import { Database, UriTransformer } from './database'
|
||||
|
||||
interface Vertices {
|
||||
all: Map<Id, Vertex>
|
||||
projects: Map<Id, Project>
|
||||
documents: Map<Id, Document>
|
||||
ranges: Map<Id, Range>
|
||||
}
|
||||
|
||||
type ItemTarget =
|
||||
| Range
|
||||
| { type: ItemEdgeProperties.declarations; range: Range }
|
||||
| { type: ItemEdgeProperties.definitions; range: Range }
|
||||
| { type: ItemEdgeProperties.references; range: Range }
|
||||
| { type: ItemEdgeProperties.referenceResults; result: ReferenceResult }
|
||||
|
||||
interface Out {
|
||||
contains: Map<Id, Document[] | Range[]>
|
||||
item: Map<Id, ItemTarget[]>
|
||||
next: Map<Id, ResultSet>
|
||||
documentSymbol: Map<Id, DocumentSymbolResult>
|
||||
foldingRange: Map<Id, FoldingRangeResult>
|
||||
documentLink: Map<Id, DocumentLinkResult>
|
||||
diagnostic: Map<Id, DiagnosticResult>
|
||||
declaration: Map<Id, DeclarationResult>
|
||||
definition: Map<Id, DefinitionResult>
|
||||
typeDefinition: Map<Id, TypeDefinitionResult>
|
||||
hover: Map<Id, HoverResult>
|
||||
references: Map<Id, ReferenceResult>
|
||||
implementation: Map<Id, ImplementationResult>
|
||||
}
|
||||
|
||||
interface In {
|
||||
contains: Map<Id, Project | Document>
|
||||
}
|
||||
|
||||
interface Indices {
|
||||
documents: Map<string, Document>
|
||||
}
|
||||
|
||||
export class JsonDatabase extends Database {
|
||||
private version: string | undefined
|
||||
private projectRoot!: URI
|
||||
|
||||
private vertices: Vertices
|
||||
private indices: Indices
|
||||
private out: Out
|
||||
private in: In
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
this.vertices = {
|
||||
all: new Map(),
|
||||
projects: new Map(),
|
||||
documents: new Map(),
|
||||
ranges: new Map(),
|
||||
}
|
||||
|
||||
this.indices = {
|
||||
documents: new Map(),
|
||||
}
|
||||
|
||||
this.out = {
|
||||
contains: new Map(),
|
||||
item: new Map(),
|
||||
next: new Map(),
|
||||
documentSymbol: new Map(),
|
||||
foldingRange: new Map(),
|
||||
documentLink: new Map(),
|
||||
diagnostic: new Map(),
|
||||
declaration: new Map(),
|
||||
definition: new Map(),
|
||||
typeDefinition: new Map(),
|
||||
hover: new Map(),
|
||||
references: new Map(),
|
||||
implementation: new Map(),
|
||||
}
|
||||
|
||||
this.in = {
|
||||
contains: new Map(),
|
||||
}
|
||||
}
|
||||
|
||||
public load(file: string, transformerFactory: (projectRoot: string) => UriTransformer): Promise<void> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
let input: fs.ReadStream = fs.createReadStream(file, { encoding: 'utf8' })
|
||||
input.on('error', reject)
|
||||
const rd = readline.createInterface(input)
|
||||
rd.on('line', (line: string) => {
|
||||
if (!line || line.length === 0) {
|
||||
return
|
||||
}
|
||||
try {
|
||||
let element: Edge | Vertex = JSON.parse(line)
|
||||
switch (element.type) {
|
||||
case ElementTypes.vertex:
|
||||
this.processVertex(element)
|
||||
break
|
||||
case ElementTypes.edge:
|
||||
this.processEdge(element)
|
||||
break
|
||||
}
|
||||
} catch (error) {
|
||||
input.destroy()
|
||||
reject(error)
|
||||
}
|
||||
})
|
||||
rd.on('close', () => {
|
||||
if (this.projectRoot === undefined) {
|
||||
reject(new Error('No project root provided.'))
|
||||
return
|
||||
}
|
||||
if (this.version === undefined) {
|
||||
reject(new Error('No version found.'))
|
||||
return
|
||||
} else {
|
||||
let semVer = SemVer.parse(this.version)
|
||||
if (!semVer) {
|
||||
reject(new Error(`No valid semantic version string. The version is: ${this.version}`))
|
||||
return
|
||||
}
|
||||
let range: SemVer.Range = new SemVer.Range('>=0.4.0 <0.5.0')
|
||||
range.includePrerelease = true
|
||||
if (!SemVer.satisfies(semVer, range)) {
|
||||
reject(new Error(`Requires version 0.4.1 but received: ${this.version}`))
|
||||
return
|
||||
}
|
||||
}
|
||||
resolve()
|
||||
})
|
||||
}).then(() => {
|
||||
this.initialize(transformerFactory)
|
||||
})
|
||||
}
|
||||
|
||||
public getProjectRoot(): URI {
|
||||
return this.projectRoot
|
||||
}
|
||||
|
||||
public close(): void {}
|
||||
|
||||
private processVertex(vertex: Vertex): void {
|
||||
this.vertices.all.set(vertex.id, vertex)
|
||||
switch (vertex.label) {
|
||||
case VertexLabels.metaData:
|
||||
this.version = vertex.version
|
||||
if (vertex.projectRoot !== undefined) {
|
||||
this.projectRoot = URI.parse(vertex.projectRoot)
|
||||
}
|
||||
break
|
||||
case VertexLabels.project:
|
||||
this.vertices.projects.set(vertex.id, vertex)
|
||||
break
|
||||
case VertexLabels.document:
|
||||
this.vertices.documents.set(vertex.id, vertex)
|
||||
this.indices.documents.set(vertex.uri, vertex)
|
||||
break
|
||||
case VertexLabels.range:
|
||||
this.vertices.ranges.set(vertex.id, vertex)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
private processEdge(edge: Edge): void {
|
||||
let property: ItemEdgeProperties | undefined
|
||||
if (edge.label === 'item') {
|
||||
property = edge.property
|
||||
}
|
||||
if (Edge.is11(edge)) {
|
||||
this.doProcessEdge(edge.label, edge.outV, edge.inV, property)
|
||||
} else if (Edge.is1N(edge)) {
|
||||
for (let inV of edge.inVs) {
|
||||
this.doProcessEdge(edge.label, edge.outV, inV, property)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private doProcessEdge(label: EdgeLabels, outV: Id, inV: Id, property?: ItemEdgeProperties): void {
|
||||
let from: Vertex | undefined = this.vertices.all.get(outV)
|
||||
let to: Vertex | undefined = this.vertices.all.get(inV)
|
||||
if (from === undefined) {
|
||||
throw new Error(`No vertex found for Id ${outV}`)
|
||||
}
|
||||
if (to === undefined) {
|
||||
throw new Error(`No vertex found for Id ${inV}`)
|
||||
}
|
||||
let values: any[] | undefined
|
||||
switch (label) {
|
||||
case EdgeLabels.contains:
|
||||
values = this.out.contains.get(from.id)
|
||||
if (values === undefined) {
|
||||
values = [to as any]
|
||||
this.out.contains.set(from.id, values)
|
||||
} else {
|
||||
values.push(to)
|
||||
}
|
||||
this.in.contains.set(to.id, from as any)
|
||||
break
|
||||
case EdgeLabels.item:
|
||||
values = this.out.item.get(from.id)
|
||||
let itemTarget: ItemTarget | undefined
|
||||
if (property !== undefined) {
|
||||
switch (property) {
|
||||
case ItemEdgeProperties.references:
|
||||
itemTarget = { type: property, range: to as Range }
|
||||
break
|
||||
case ItemEdgeProperties.declarations:
|
||||
itemTarget = { type: property, range: to as Range }
|
||||
break
|
||||
case ItemEdgeProperties.definitions:
|
||||
itemTarget = { type: property, range: to as Range }
|
||||
break
|
||||
case ItemEdgeProperties.referenceResults:
|
||||
itemTarget = { type: property, result: to as ReferenceResult }
|
||||
break
|
||||
}
|
||||
} else {
|
||||
itemTarget = to as Range
|
||||
}
|
||||
if (itemTarget !== undefined) {
|
||||
if (values === undefined) {
|
||||
values = [itemTarget]
|
||||
this.out.item.set(from.id, values)
|
||||
} else {
|
||||
values.push(itemTarget)
|
||||
}
|
||||
}
|
||||
break
|
||||
case EdgeLabels.next:
|
||||
this.out.next.set(from.id, to as ResultSet)
|
||||
break
|
||||
case EdgeLabels.textDocument_documentSymbol:
|
||||
this.out.documentSymbol.set(from.id, to as DocumentSymbolResult)
|
||||
break
|
||||
case EdgeLabels.textDocument_foldingRange:
|
||||
this.out.foldingRange.set(from.id, to as FoldingRangeResult)
|
||||
break
|
||||
case EdgeLabels.textDocument_documentLink:
|
||||
this.out.documentLink.set(from.id, to as DocumentLinkResult)
|
||||
break
|
||||
case EdgeLabels.textDocument_diagnostic:
|
||||
this.out.diagnostic.set(from.id, to as DiagnosticResult)
|
||||
break
|
||||
case EdgeLabels.textDocument_definition:
|
||||
this.out.definition.set(from.id, to as DefinitionResult)
|
||||
break
|
||||
case EdgeLabels.textDocument_typeDefinition:
|
||||
this.out.typeDefinition.set(from.id, to as TypeDefinitionResult)
|
||||
break
|
||||
case EdgeLabels.textDocument_hover:
|
||||
this.out.hover.set(from.id, to as HoverResult)
|
||||
break
|
||||
case EdgeLabels.textDocument_references:
|
||||
this.out.references.set(from.id, to as ReferenceResult)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
public getDocumentInfos(): DocumentInfo[] {
|
||||
let result: DocumentInfo[] = []
|
||||
this.vertices.documents.forEach((document, key) => {
|
||||
result.push({ uri: document.uri, id: key })
|
||||
})
|
||||
return result
|
||||
}
|
||||
|
||||
protected findFile(uri: string): Id | undefined {
|
||||
let result = this.indices.documents.get(uri)
|
||||
if (result == undefined) {
|
||||
return undefined
|
||||
}
|
||||
return result.id
|
||||
}
|
||||
|
||||
protected fileContent(id: Id): string | undefined {
|
||||
let document = this.vertices.documents.get(id)
|
||||
if (document === undefined) {
|
||||
return undefined
|
||||
}
|
||||
return document.contents
|
||||
}
|
||||
|
||||
public foldingRanges(uri: string): lsp.FoldingRange[] | undefined {
|
||||
let document = this.indices.documents.get(this.toDatabase(uri))
|
||||
if (document === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let foldingRangeResult = this.out.foldingRange.get(document.id)
|
||||
if (foldingRangeResult === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let result: lsp.FoldingRange[] = []
|
||||
for (let item of foldingRangeResult.result) {
|
||||
result.push(Object.assign(Object.create(null), item))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
public documentSymbols(uri: string): lsp.DocumentSymbol[] | undefined {
|
||||
let document = this.indices.documents.get(this.toDatabase(uri))
|
||||
if (document === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let documentSymbolResult = this.out.documentSymbol.get(document.id)
|
||||
if (documentSymbolResult === undefined || documentSymbolResult.result.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
let first = documentSymbolResult.result[0]
|
||||
let result: lsp.DocumentSymbol[] = []
|
||||
if (lsp.DocumentSymbol.is(first)) {
|
||||
for (let item of documentSymbolResult.result) {
|
||||
result.push(Object.assign(Object.create(null), item))
|
||||
}
|
||||
} else {
|
||||
for (let item of documentSymbolResult.result as RangeBasedDocumentSymbol[]) {
|
||||
let converted = this.toDocumentSymbol(item)
|
||||
if (converted !== undefined) {
|
||||
result.push(converted)
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
private toDocumentSymbol(value: RangeBasedDocumentSymbol): lsp.DocumentSymbol | undefined {
|
||||
let range = this.vertices.ranges.get(value.id)!
|
||||
let tag = range.tag
|
||||
if (tag === undefined || !(tag.type === 'declaration' || tag.type === 'definition')) {
|
||||
return undefined
|
||||
}
|
||||
let result: lsp.DocumentSymbol = lsp.DocumentSymbol.create(
|
||||
tag.text,
|
||||
tag.detail || '',
|
||||
tag.kind,
|
||||
tag.fullRange,
|
||||
this.asRange(range)
|
||||
)
|
||||
if (value.children && value.children.length > 0) {
|
||||
result.children = []
|
||||
for (let child of value.children) {
|
||||
let converted = this.toDocumentSymbol(child)
|
||||
if (converted !== undefined) {
|
||||
result.children.push(converted)
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
public hover(uri: string, position: lsp.Position): lsp.Hover | undefined {
|
||||
let range = this.findRangeFromPosition(this.toDatabase(uri), position)
|
||||
if (range === undefined) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
let hoverResult: HoverResult | undefined = this.getResult(range, this.out.hover)
|
||||
if (hoverResult === undefined) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
let hoverRange = hoverResult.result.range !== undefined ? hoverResult.result.range : range
|
||||
return {
|
||||
contents: hoverResult.result.contents,
|
||||
range: hoverRange,
|
||||
}
|
||||
}
|
||||
|
||||
public declarations(uri: string, position: lsp.Position): lsp.Location | lsp.Location[] | undefined {
|
||||
let range = this.findRangeFromPosition(this.toDatabase(uri), position)
|
||||
if (range === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let declarationResult: DeclarationResult | undefined = this.getResult(range, this.out.declaration)
|
||||
if (declarationResult === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let ranges = this.item(declarationResult)
|
||||
if (ranges === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let result: lsp.Location[] = []
|
||||
for (let element of ranges) {
|
||||
result.push(this.asLocation(element))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
public definitions(uri: string, position: lsp.Position): lsp.Location | lsp.Location[] | undefined {
|
||||
let range = this.findRangeFromPosition(this.toDatabase(uri), position)
|
||||
if (range === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let definitionResult: DefinitionResult | undefined = this.getResult(range, this.out.definition)
|
||||
if (definitionResult === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let ranges = this.item(definitionResult)
|
||||
if (ranges === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let result: lsp.Location[] = []
|
||||
for (let element of ranges) {
|
||||
result.push(this.asLocation(element))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
public references(uri: string, position: lsp.Position, context: lsp.ReferenceContext): lsp.Location[] | undefined {
|
||||
let range = this.findRangeFromPosition(this.toDatabase(uri), position)
|
||||
if (range === undefined) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
let referenceResult: ReferenceResult | undefined = this.getResult(range, this.out.references)
|
||||
if (referenceResult === undefined) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
let targets = this.item(referenceResult)
|
||||
if (targets === undefined) {
|
||||
return undefined
|
||||
}
|
||||
return this.asReferenceResult(targets, context, new Set())
|
||||
}
|
||||
|
||||
private getResult<T>(range: Range, edges: Map<Id, T>): T | undefined {
|
||||
let id: Id | undefined = range.id
|
||||
do {
|
||||
let result: T | undefined = edges.get(id)
|
||||
if (result !== undefined) {
|
||||
return result
|
||||
}
|
||||
let next = this.out.next.get(id)
|
||||
id = next !== undefined ? next.id : undefined
|
||||
} while (id !== undefined)
|
||||
return undefined
|
||||
}
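// Illustrative note (not part of the original file): ranges often carry no result of
// their own and instead point at a shared ResultSet via `next` edges, so this loop
// walks range -> resultSet -> resultSet ... until it finds an outgoing edge of the
// requested kind (hover, definition, references, and so on).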
|
||||
|
||||
private item(value: DeclarationResult): Range[]
|
||||
private item(value: DefinitionResult): Range[]
|
||||
private item(value: ReferenceResult): ItemTarget[]
|
||||
private item(value: DeclarationResult | DefinitionResult | ReferenceResult): Range[] | ItemTarget[] | undefined {
|
||||
if (value.label === 'declarationResult') {
|
||||
return this.out.item.get(value.id) as Range[]
|
||||
} else if (value.label === 'definitionResult') {
|
||||
return this.out.item.get(value.id) as Range[]
|
||||
} else if (value.label === 'referenceResult') {
|
||||
return this.out.item.get(value.id) as ItemTarget[]
|
||||
} else {
|
||||
return undefined
|
||||
}
|
||||
}
|
||||
|
||||
private asReferenceResult(targets: ItemTarget[], context: lsp.ReferenceContext, dedup: Set<Id>): lsp.Location[] {
|
||||
let result: lsp.Location[] = []
|
||||
for (let target of targets) {
|
||||
if (target.type === ItemEdgeProperties.declarations && context.includeDeclaration) {
|
||||
this.addLocation(result, target.range, dedup)
|
||||
} else if (target.type === ItemEdgeProperties.definitions && context.includeDeclaration) {
|
||||
this.addLocation(result, target.range, dedup)
|
||||
} else if (target.type === ItemEdgeProperties.references) {
|
||||
this.addLocation(result, target.range, dedup)
|
||||
} else if (target.type === ItemEdgeProperties.referenceResults) {
|
||||
result.push(...this.asReferenceResult(this.item(target.result), context, dedup))
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
private addLocation(result: lsp.Location[], value: Range | lsp.Location, dedup: Set<Id>): void {
|
||||
if (lsp.Location.is(value)) {
|
||||
result.push(value)
|
||||
} else {
|
||||
if (dedup.has(value.id)) {
|
||||
return
|
||||
}
|
||||
let document = this.in.contains.get(value.id)!
|
||||
result.push(lsp.Location.create(this.fromDatabase((document as Document).uri), this.asRange(value)))
|
||||
dedup.add(value.id)
|
||||
}
|
||||
}
|
||||
|
||||
private findRangeFromPosition(file: string, position: lsp.Position): Range | undefined {
|
||||
let document = this.indices.documents.get(file)
|
||||
if (document === undefined) {
|
||||
return undefined
|
||||
}
|
||||
let contains = this.out.contains.get(document.id)
|
||||
if (contains === undefined || contains.length === 0) {
|
||||
return undefined
|
||||
}
|
||||
|
||||
let candidate: Range | undefined
|
||||
for (let item of contains) {
|
||||
if (item.label !== VertexLabels.range) {
|
||||
continue
|
||||
}
|
||||
if (JsonDatabase.containsPosition(item, position)) {
|
||||
if (!candidate) {
|
||||
candidate = item
|
||||
} else {
|
||||
if (JsonDatabase.containsRange(candidate, item)) {
|
||||
candidate = item
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return candidate
|
||||
}
|
||||
|
||||
private asLocation(value: Range | lsp.Location): lsp.Location {
|
||||
if (lsp.Location.is(value)) {
|
||||
return value
|
||||
} else {
|
||||
let document = this.in.contains.get(value.id)!
|
||||
return lsp.Location.create(this.fromDatabase((document as Document).uri), this.asRange(value))
|
||||
}
|
||||
}
|
||||
|
||||
private static containsPosition(range: lsp.Range, position: lsp.Position): boolean {
|
||||
if (position.line < range.start.line || position.line > range.end.line) {
|
||||
return false
|
||||
}
|
||||
if (position.line === range.start.line && position.character < range.start.character) {
|
||||
return false
|
||||
}
|
||||
if (position.line === range.end.line && position.character > range.end.character) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/**
|
||||
* Test if `otherRange` is in `range`. If the ranges are equal, will return true.
|
||||
*/
|
||||
public static containsRange(range: lsp.Range, otherRange: lsp.Range): boolean {
|
||||
if (otherRange.start.line < range.start.line || otherRange.end.line < range.start.line) {
|
||||
return false
|
||||
}
|
||||
if (otherRange.start.line > range.end.line || otherRange.end.line > range.end.line) {
|
||||
return false
|
||||
}
|
||||
if (otherRange.start.line === range.start.line && otherRange.start.character < range.start.character) {
|
||||
return false
|
||||
}
|
||||
if (otherRange.end.line === range.end.line && otherRange.end.character > range.end.character) {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
}
|
||||
@ -1,20 +0,0 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"noEmit": false,
"target": "es6",
"module": "commonjs",
"moduleResolution": "node",
"esModuleInterop": true,
"sourceMap": true,
"outDir": "out",
"lib": ["es6"],
"strict": true,
"skipLibCheck": true,
"noImplicitAny": true,
"noImplicitReturns": true,
},
"files": [],
"include": ["src"],
"exclude": ["node_modules"],
}
@ -1,6 +0,0 @@
{
"extends": ["../../tslint.config.js"],
"linterOptions": {
"exclude": ["node_modules/**", "src/database.ts", "src/files.ts", "src/json.ts"]
}
}
File diff suppressed because it is too large
162
lsif/src/backend.ts
Normal file
162
lsif/src/backend.ts
Normal file
@ -0,0 +1,162 @@
|
||||
import * as fs from 'mz/fs'
|
||||
import * as path from 'path'
|
||||
import * as readline from 'mz/readline'
|
||||
import { ConnectionCache, DocumentCache, ResultChunkCache } from './cache'
|
||||
import { Database } from './database'
|
||||
import { DefinitionModel, DocumentModel, MetaModel, ReferenceModel, ResultChunkModel } from './models.database'
|
||||
import { Edge, Vertex } from 'lsif-protocol'
|
||||
import { hasErrorCode } from './util'
|
||||
import { importLsif } from './importer'
|
||||
import { Readable } from 'stream'
|
||||
import { XrepoDatabase } from './xrepo'
|
||||
|
||||
export const ERRNOLSIFDATA = 'NoLSIFDataError'
|
||||
|
||||
/**
|
||||
* An error thrown when no LSIF database can be found on disk.
|
||||
*/
|
||||
export class NoLSIFDataError extends Error {
|
||||
public readonly name = ERRNOLSIFDATA
|
||||
|
||||
constructor(repository: string, commit: string) {
|
||||
super(`No LSIF data available for ${repository}@${commit}.`)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Backend for LSIF dumps stored in SQLite.
|
||||
*/
|
||||
export class Backend {
|
||||
constructor(
|
||||
private storageRoot: string,
|
||||
private xrepoDatabase: XrepoDatabase,
|
||||
private connectionCache: ConnectionCache,
|
||||
private documentCache: DocumentCache,
|
||||
private resultChunkCache: ResultChunkCache
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Read the content of the streamed, JSON-encoded LSIF dump and insert it
* into storage with an encoding that can be subsequently read by the
* `createDatabase` method.
|
||||
*/
|
||||
public async insertDump(input: Readable, repository: string, commit: string): Promise<void> {
|
||||
const outFile = makeFilename(this.storageRoot, repository, commit)
|
||||
|
||||
try {
|
||||
// Remove the old database file, if it exists
|
||||
await fs.unlink(outFile)
|
||||
} catch (e) {
|
||||
if (!hasErrorCode(e, 'ENOENT')) {
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
// Remove old data from xrepo database
|
||||
await this.xrepoDatabase.clearCommit(repository, commit)
|
||||
|
||||
// Remove any connection in the cache to the file we just removed
|
||||
await this.connectionCache.bustKey(outFile)
|
||||
|
||||
const { packages, references } = await this.connectionCache.withTransactionalEntityManager(
|
||||
outFile,
|
||||
[DefinitionModel, DocumentModel, MetaModel, ReferenceModel, ResultChunkModel],
|
||||
entityManager => importLsif(entityManager, parseLines(readline.createInterface({ input }))),
|
||||
async connection => {
|
||||
await connection.query('PRAGMA synchronous = OFF')
|
||||
await connection.query('PRAGMA journal_mode = OFF')
|
||||
}
|
||||
)
|
||||
|
||||
// These need to be done in sequence, as SQLite can only have one
|
||||
// write txn at a time without causing the other one to abort with
|
||||
// a weird error.
|
||||
await this.xrepoDatabase.addPackages(repository, commit, packages)
|
||||
await this.xrepoDatabase.addReferences(repository, commit, references)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a database relevant to the given repository and commit hash. This
|
||||
* assumes that data for this repository and commit has already been inserted via
|
||||
* `insertDump` (otherwise this method is expected to throw).
|
||||
*/
|
||||
public async createDatabase(repository: string, commit: string): Promise<Database> {
|
||||
const file = makeFilename(this.storageRoot, repository, commit)
|
||||
|
||||
try {
|
||||
await fs.stat(file)
|
||||
} catch (e) {
|
||||
if (hasErrorCode(e, 'ENOENT')) {
|
||||
throw new NoLSIFDataError(repository, commit)
|
||||
}
|
||||
|
||||
throw e
|
||||
}
|
||||
|
||||
return new Database(
|
||||
this.storageRoot,
|
||||
this.xrepoDatabase,
|
||||
this.connectionCache,
|
||||
this.documentCache,
|
||||
this.resultChunkCache,
|
||||
repository,
|
||||
commit,
|
||||
file
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the path of the SQLite database file for the given repository and commit.
|
||||
*
|
||||
* @param storageRoot The path where SQLite databases are stored.
|
||||
* @param repository The repository name.
|
||||
* @param commit The repository commit.
|
||||
*/
|
||||
export function makeFilename(storageRoot: string, repository: string, commit: string): string {
|
||||
return path.join(storageRoot, `${encodeURIComponent(repository)}@${commit}.lsif.db`)
|
||||
}
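// For example (illustrative, not part of the original change): makeFilename('lsif-storage',
// 'github.com/user/repo', '<40-char hash>') returns
// 'lsif-storage/github.com%2Fuser%2Frepo@<40-char hash>.lsif.db'.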
|
||||
|
||||
/**
|
||||
* Converts streaming JSON input into an iterable of vertex and edge objects.
|
||||
*
|
||||
* @param lines The stream of raw, uncompressed JSON lines.
|
||||
*/
|
||||
async function* parseLines(lines: AsyncIterable<string>): AsyncIterable<Vertex | Edge> {
|
||||
let i = 0
|
||||
for await (const line of lines) {
|
||||
try {
|
||||
yield JSON.parse(line)
|
||||
} catch (e) {
|
||||
throw Object.assign(
|
||||
new Error(`Failed to process line #${i + 1} (${JSON.stringify(line)}): Invalid JSON.`),
|
||||
{ status: 422 }
|
||||
)
|
||||
}
|
||||
|
||||
i++
|
||||
}
|
||||
}
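// Illustrative usage sketch (not part of the original change): stream a local dump file
// through parseLines and log each vertex or edge. The filename is hypothetical.
async function exampleParseLines(): Promise<void> {
    const input = fs.createReadStream('dump.lsif')
    for await (const element of parseLines(readline.createInterface({ input }))) {
        console.log(element.type, element.label)
    }
}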
|
||||
|
||||
export async function createBackend(
|
||||
storageRoot: string,
|
||||
connectionCache: ConnectionCache,
|
||||
documentCache: DocumentCache,
|
||||
resultChunkCache: ResultChunkCache
|
||||
): Promise<Backend> {
|
||||
try {
|
||||
await fs.mkdir(storageRoot)
|
||||
} catch (e) {
|
||||
if (!hasErrorCode(e, 'EEXIST')) {
|
||||
throw e
|
||||
}
|
||||
}
|
||||
|
||||
return new Backend(
|
||||
storageRoot,
|
||||
new XrepoDatabase(connectionCache, path.join(storageRoot, 'xrepo.db')),
|
||||
connectionCache,
|
||||
documentCache,
|
||||
resultChunkCache
|
||||
)
|
||||
}
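// Illustrative wiring sketch (not part of the original change): construct the caches and
// a backend, then open the database for one repository and commit. The storage root,
// cache sizes, repository, and commit below are hypothetical values.
async function exampleCreateBackend(): Promise<void> {
    const connectionCache = new ConnectionCache(100)
    const documentCache = new DocumentCache(100 * 1024 * 1024)
    const resultChunkCache = new ResultChunkCache(100 * 1024 * 1024)
    const backend = await createBackend('lsif-storage', connectionCache, documentCache, resultChunkCache)
    const database = await backend.createDatabase('github.com/user/repo', 'a'.repeat(40))
    console.log(database)
}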
|
||||
188
lsif/src/cache.test.ts
Normal file
188
lsif/src/cache.test.ts
Normal file
@ -0,0 +1,188 @@
|
||||
import { GenericCache, createBarrierPromise } from './cache'
|
||||
import * as sinon from 'sinon'
|
||||
|
||||
describe('GenericCache', () => {
|
||||
it('should evict items in reverse recency order', async () => {
|
||||
const values = [
|
||||
'foo', // foo*
|
||||
'bar', // bar* foo
|
||||
'baz', // baz* bar foo
|
||||
'bonk', // bonk* baz bar foo
|
||||
'quux', // quux* bonk baz bar foo
|
||||
'bar', // bar quux bonk baz foo
|
||||
'foo', // foo bar quux bonk baz
|
||||
'honk', // honk* foo bar quux bonk
|
||||
'foo', // foo honk bar quux bonk
|
||||
'baz', // baz* foo honk bar quux
|
||||
]
|
||||
|
||||
// These are the cache values that need to be created, in-order
|
||||
const expectedInstantiations = ['foo', 'bar', 'baz', 'bonk', 'quux', 'honk', 'baz']
|
||||
|
||||
const factory = sinon.stub<string[], Promise<string>>()
|
||||
for (const [i, value] of expectedInstantiations.entries()) {
|
||||
// Log the value arg and resolve the cache data immediately
|
||||
factory.onCall(i).returns(Promise.resolve(value))
|
||||
}
|
||||
|
||||
const cache = new GenericCache<string, string>(5, () => 1, () => {})
|
||||
for (const value of values) {
|
||||
const returnValue = await cache.withValue(value, () => factory(value), v => Promise.resolve(v))
|
||||
expect(returnValue).toBe(value)
|
||||
}
|
||||
|
||||
// Expect the args of the factory to equal the resolved values
|
||||
expect(factory.args).toEqual(expectedInstantiations.map(v => [v]))
|
||||
})
|
||||
|
||||
it('should asynchronously resolve cache values', async () => {
|
||||
const factory = sinon.stub<string[], Promise<string>>()
|
||||
const { wait, done } = createBarrierPromise()
|
||||
factory.returns(wait.then(() => 'bar'))
|
||||
|
||||
const cache = new GenericCache<string, string>(5, () => 1, () => {})
|
||||
const p1 = cache.withValue('foo', factory, v => Promise.resolve(v))
|
||||
const p2 = cache.withValue('foo', factory, v => Promise.resolve(v))
|
||||
const p3 = cache.withValue('foo', factory, v => Promise.resolve(v))
|
||||
done()
|
||||
|
||||
expect(await Promise.all([p1, p2, p3])).toEqual(['bar', 'bar', 'bar'])
|
||||
expect(factory.callCount).toEqual(1)
|
||||
})
|
||||
|
||||
it('should call dispose function on eviction', async () => {
|
||||
const values = [
|
||||
'foo', // foo
|
||||
'bar', // bar foo
|
||||
'baz', // baz bar (drops foo)
|
||||
'foo', // foo baz (drops bar)
|
||||
]
|
||||
|
||||
const { wait, done } = createBarrierPromise()
|
||||
const disposer = sinon.spy(done)
|
||||
const cache = new GenericCache<string, string>(2, () => 1, disposer)
|
||||
|
||||
for (const value of values) {
|
||||
await cache.withValue(value, () => Promise.resolve(value), v => Promise.resolve(v))
|
||||
}
|
||||
|
||||
await wait
|
||||
expect(disposer.args).toEqual([['foo'], ['bar']])
|
||||
})
|
||||
|
||||
it('should calculate size by resolved value', async () => {
|
||||
const values = [
|
||||
2, // 2, size = 2
|
||||
3, // 3 2, size = 5
|
||||
1, // 1 3, size = 4
|
||||
2, // 1 2, size = 3
|
||||
]
|
||||
|
||||
const expectedInstantiations = [2, 3, 1, 2]
|
||||
|
||||
const factory = sinon.stub<number[], Promise<number>>()
|
||||
for (const [i, value] of expectedInstantiations.entries()) {
|
||||
factory.onCall(i).returns(Promise.resolve(value))
|
||||
}
|
||||
|
||||
const cache = new GenericCache<number, number>(5, v => v, () => {})
|
||||
for (const value of values) {
|
||||
await cache.withValue(value, () => factory(value), v => Promise.resolve(v))
|
||||
}
|
||||
|
||||
expect(factory.args).toEqual(expectedInstantiations.map(v => [v]))
|
||||
})
|
||||
|
||||
it('should not evict referenced cache entries', async () => {
|
||||
const { wait, done } = createBarrierPromise()
|
||||
const disposer = sinon.spy(done)
|
||||
const cache = new GenericCache<string, string>(5, () => 1, disposer)
|
||||
|
||||
const fooResolver = () => Promise.resolve('foo')
|
||||
const barResolver = () => Promise.resolve('bar')
|
||||
const bazResolver = () => Promise.resolve('baz')
|
||||
const bonkResolver = () => Promise.resolve('bonk')
|
||||
const quuxResolver = () => Promise.resolve('quux')
|
||||
const honkResolver = () => Promise.resolve('honk')
|
||||
const ronkResolver = () => Promise.resolve('ronk')
|
||||
|
||||
await cache.withValue('foo', fooResolver, async () => {
|
||||
await cache.withValue('bar', barResolver, async () => {
|
||||
await cache.withValue('baz', bazResolver, async () => {
|
||||
await cache.withValue('bonk', bonkResolver, async () => {
|
||||
await cache.withValue('quux', quuxResolver, async () => {
|
||||
// Sixth entry, but nothing to evict (all held)
|
||||
await cache.withValue('honk', honkResolver, () => Promise.resolve())
|
||||
|
||||
// Seventh entry, honk can now be removed as it's the least
|
||||
// recently used value that's not currently under a read lock.
|
||||
await cache.withValue('ronk', ronkResolver, () => Promise.resolve())
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
// Release and remove the least recently used
|
||||
|
||||
await cache.withValue(
|
||||
'honk',
|
||||
() => Promise.resolve('honk'),
|
||||
async () => {
|
||||
await wait
|
||||
expect(disposer.args).toEqual([['honk'], ['foo'], ['bar']])
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
it('should dispose busted keys', async () => {
|
||||
const { wait, done } = createBarrierPromise()
|
||||
const disposer = sinon.spy(done)
|
||||
const cache = new GenericCache<string, string>(5, () => 1, disposer)
|
||||
|
||||
const factory = sinon.stub<string[], Promise<string>>()
|
||||
factory.returns(Promise.resolve('foo'))
|
||||
|
||||
// Construct, then bust, the same key
|
||||
await cache.withValue('foo', factory, () => Promise.resolve())
|
||||
await cache.bustKey('foo')
|
||||
await wait
|
||||
|
||||
// Ensure value was disposed
|
||||
expect(disposer.args).toEqual([['foo']])
|
||||
|
||||
// Ensure entry was removed
|
||||
await cache.withValue('foo', factory, () => Promise.resolve())
|
||||
expect(factory.args).toHaveLength(2)
|
||||
})
|
||||
|
||||
it('should wait to dispose busted keys that are in use', async () => {
|
||||
const { wait: wait1, done: done1 } = createBarrierPromise()
|
||||
const { wait: wait2, done: done2 } = createBarrierPromise()
|
||||
|
||||
const resolver = () => Promise.resolve('foo')
|
||||
const disposer = sinon.spy(done1)
|
||||
const cache = new GenericCache<string, string>(5, () => 1, disposer)
|
||||
|
||||
// Create a cache entry for 'foo' that blocks on done2
|
||||
const p1 = cache.withValue('foo', resolver, () => wait2)
|
||||
|
||||
// Attempt to bust the cache key that's used in the blocking promise above
|
||||
const p2 = cache.bustKey('foo')
|
||||
|
||||
// Ensure that p1 and p2 are blocked on each other
|
||||
const timedResolver = new Promise(resolve => setTimeout(() => resolve('$'), 10))
|
||||
const winner = await Promise.race([p1, p2, timedResolver])
|
||||
expect(winner).toEqual('$')
|
||||
|
||||
// Ensure dispose hasn't been called
|
||||
expect(disposer.args).toHaveLength(0)
|
||||
|
||||
// Unblock p1
|
||||
done2()
|
||||
|
||||
// Show that all promises are unblocked and dispose was called
|
||||
await Promise.all([p1, p2, wait1])
|
||||
expect(disposer.args).toEqual([['foo']])
|
||||
})
|
||||
})
|
||||
386
lsif/src/cache.ts
Normal file
386
lsif/src/cache.ts
Normal file
@ -0,0 +1,386 @@
|
||||
import { Connection, createConnection, EntityManager } from 'typeorm'
|
||||
import { DocumentData, ResultChunkData } from './models.database'
|
||||
import Yallist from 'yallist'
|
||||
|
||||
/**
|
||||
* A wrapper around a cache value promise.
|
||||
*/
|
||||
interface CacheEntry<K, V> {
|
||||
/**
|
||||
* The key that can retrieve this cache entry.
|
||||
*/
|
||||
key: K
|
||||
|
||||
/**
|
||||
* The promise that will resolve the cache value.
|
||||
*/
|
||||
promise: Promise<V>
|
||||
|
||||
/**
|
||||
* The size of the promise value, once resolved. This value is
|
||||
* initially zero and is updated once an appropriate size can be
|
||||
* determined from the result of `promise`.
|
||||
*/
|
||||
size: number
|
||||
|
||||
/**
|
||||
* The number of active withValue calls referencing this entry. If
|
||||
* this value is non-zero, it is not evict-able from the cache.
|
||||
*/
|
||||
readers: number
|
||||
|
||||
/**
|
||||
* A function reference that should be called, if present, when
|
||||
* the reader count for an entry goes to zero. This will unblock a
|
||||
* promise created in `bustKey` that waits for all readers to finish
|
||||
* using the cache value.
|
||||
*/
|
||||
waiter: (() => void) | undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* A generic LRU cache. We use this instead of the `lru-cache` package
|
||||
* available in NPM so that we can handle async payloads in a more
|
||||
* first-class way as well as shedding some of the cruft around evictions.
|
||||
* We need to ensure database handles are closed when they are no longer
|
||||
* accessible, and we also do not want to evict any database handle while
|
||||
* it is actively being used.
|
||||
*/
|
||||
export class GenericCache<K, V> {
|
||||
/**
|
||||
* A map from from keys to nodes in `lruList`.
|
||||
*/
|
||||
private cache = new Map<K, Yallist.Node<CacheEntry<K, V>>>()
|
||||
|
||||
/**
|
||||
* A linked list of cache entries ordered by last-touch.
|
||||
*/
|
||||
private lruList = new Yallist<CacheEntry<K, V>>()
|
||||
|
||||
/**
|
||||
* The additive size of the items currently in the cache.
|
||||
*/
|
||||
private size = 0
|
||||
|
||||
/**
|
||||
* Create a new `GenericCache` with the given maximum (soft) size for
|
||||
* all items in the cache, a function that determines the size of a
|
||||
* cache item from its resolved value, and a function that is called
|
||||
* when an item falls out of the cache.
|
||||
*/
|
||||
constructor(
|
||||
private max: number,
|
||||
private sizeFunction: (value: V) => number,
|
||||
private disposeFunction: (value: V) => Promise<void> | void
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Check if `key` exists in the cache. If it does not, create a value
|
||||
* from `factory`. Once the cache value resolves, invoke `callback` and
|
||||
* return its value. This method acts as a lock around the cache entry
|
||||
* so that it may not be removed while the factory or callback functions
|
||||
* are running.
|
||||
*
|
||||
* @param key The cache key.
|
||||
* @param factory The function used to create a new value.
|
||||
* @param callback The function to invoke with the resolved cache value.
|
||||
*/
|
||||
public async withValue<T>(key: K, factory: () => Promise<V>, callback: (value: V) => Promise<T>): Promise<T> {
|
||||
// Find or create the entry
|
||||
const entry = await this.getEntry(key, factory)
|
||||
|
||||
try {
|
||||
// Re-resolve the promise. If this is already resolved it's a fast
|
||||
// no-op. Otherwise, we got a cache entry that was under-construction
|
||||
// and will resolve shortly.
|
||||
|
||||
return await callback(await entry.promise)
|
||||
} finally {
|
||||
// Unlock the cache entry
|
||||
entry.readers--
|
||||
|
||||
// If we were the last reader and there's a bustKey call waiting on
|
||||
// us to finish, inform it that we're done using it. Bust away!
|
||||
|
||||
if (entry.readers === 0 && entry.waiter !== undefined) {
|
||||
entry.waiter()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a key from the cache. This blocks until all current readers
|
||||
* of the cached value have completed, then calls the dispose function.
|
||||
*
|
||||
* Do NOT call this function while holding a value for the same key: you will deadlock.
|
||||
*
|
||||
* @param key The cache key.
|
||||
*/
|
||||
public async bustKey(key: K): Promise<void> {
|
||||
const node = this.cache.get(key)
|
||||
if (!node) {
|
||||
return
|
||||
}
|
||||
|
||||
const {
|
||||
value: { promise, size, readers },
|
||||
} = node
|
||||
|
||||
// Immediately remove from cache so that another reader cannot get
|
||||
// ahold of the value, and so that another bust attempt cannot call
|
||||
// dispose twice on the same value.
|
||||
|
||||
this.removeNode(node, size)
|
||||
|
||||
// Wait for the value to resolve. We do this first in case the value
|
||||
// was still under construction. This simplifies the rest of the logic
|
||||
// below, as readers can never be negative once the promise value has
|
||||
// resolved.
|
||||
|
||||
const value = await promise
|
||||
|
||||
if (readers > 0) {
|
||||
// There's someone holding the cache value. Create a barrier promise
|
||||
// and stash the function that can unlock it. When the reader count
|
||||
// for an entry is decremented, the waiter function, if present, is
|
||||
// invoked. This basically forms a condition variable.
|
||||
|
||||
const { wait, done } = createBarrierPromise()
|
||||
node.value.waiter = done
|
||||
await wait
|
||||
}
|
||||
|
||||
// We have the resolved value, removed from the cache, which is no longer
|
||||
// used by any reader. It's safe to dispose now.
|
||||
await this.disposeFunction(value)
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if `key` exists in the cache. If it does not, create a value
|
||||
* from `factory` and add it to the cache. In either case, update the
|
||||
* cache entry's place in `lruList` and return the entry. If a new
|
||||
* value was created, then it may trigger a cache eviction once its
|
||||
* value resolves.
|
||||
*
|
||||
* @param key The cache key.
|
||||
* @param factory The function used to create a new value.
|
||||
*/
|
||||
private async getEntry(key: K, factory: () => Promise<V>): Promise<CacheEntry<K, V>> {
|
||||
const node = this.cache.get(key)
|
||||
if (node) {
|
||||
// Found, move to head of list
|
||||
this.lruList.unshiftNode(node)
|
||||
const entry = node.value
|
||||
// Ensure entry is locked before returning
|
||||
entry.readers++
|
||||
return entry
|
||||
}
|
||||
|
||||
// Create promise and the entry that wraps it. We don't know the effective
|
||||
// size of the value until the promise resolves, so we put zero. We have a
|
||||
// reader count of 1, in order to lock the entry until after the user that
|
||||
// requested the entry is done using it. We don't want to block here while
|
||||
// waiting for the promise value to resolve, otherwise a second request for
|
||||
// the same key will create a duplicate cache entry.
|
||||
|
||||
const promise = factory()
|
||||
const newEntry = { key, promise, size: 0, readers: 1, waiter: undefined }
|
||||
|
||||
// Add to head of list
|
||||
this.lruList.unshift(newEntry)
|
||||
|
||||
// Grab the head of the list we just pushed and store it
|
||||
// in the map. We need the node that the unshift method
|
||||
// creates so we can unlink it in constant time.
|
||||
const head = this.lruList.head
|
||||
if (head) {
|
||||
this.cache.set(key, head)
|
||||
}
|
||||
|
||||
// Now that another call to getEntry will find the cache entry
|
||||
// and early-out, we can block here and wait to resolve the
|
||||
// value, then update the entry and cache sizes.
|
||||
|
||||
const value = await promise
|
||||
await this.resolved(newEntry, value)
|
||||
return newEntry
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine the size of the resolved value and update the size of the
|
||||
* entry as well as `size`. While the total cache size exceeds `max`,
|
||||
* try to evict the least recently used cache entries that do not have
|
||||
* a non-zero `readers` count.
|
||||
*
|
||||
* @param entry The cache entry.
|
||||
* @param value The cache entry's resolved value.
|
||||
*/
|
||||
private async resolved(entry: CacheEntry<K, V>, value: V): Promise<void> {
|
||||
const size = this.sizeFunction(value)
|
||||
this.size += size
|
||||
entry.size = size
|
||||
|
||||
let node = this.lruList.tail
|
||||
while (this.size > this.max && node) {
|
||||
const {
|
||||
prev,
|
||||
value: { promise, size, readers },
|
||||
} = node
|
||||
|
||||
if (readers === 0) {
|
||||
// If readers > 0, then it may be actively used by another
|
||||
// part of the code that hit a portion of their critical
|
||||
// section that returned control to the event loop. We don't
|
||||
// want to mess with those if we can help it.
|
||||
|
||||
this.removeNode(node, size)
|
||||
await this.disposeFunction(await promise)
|
||||
}
|
||||
|
||||
node = prev
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove the given node from the list and update the cache size.
|
||||
*
|
||||
* @param node The node to remove.
|
||||
* @param size The size of the promise value.
|
||||
*/
|
||||
private removeNode(node: Yallist.Node<CacheEntry<K, V>>, size: number): void {
|
||||
this.size -= size
|
||||
this.lruList.removeNode(node)
|
||||
this.cache.delete(node.value.key)
|
||||
}
|
||||
}
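// Illustrative usage sketch (not part of the original change): a small cache of string
// payloads keyed by name, where an entry's size is the string length and eviction needs
// no special cleanup. All values here are hypothetical.
async function exampleGenericCache(): Promise<void> {
    const cache = new GenericCache<string, string>(
        1024,
        value => value.length,
        () => {
            /* plain strings need no explicit disposal */
        }
    )

    await cache.withValue(
        'greeting',
        () => Promise.resolve('hello world'),
        value => Promise.resolve(console.log(value))
    )
}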
|
||||
|
||||
/**
|
||||
* A cache of SQLite database connections indexed by database filenames.
|
||||
*/
|
||||
export class ConnectionCache extends GenericCache<string, Connection> {
|
||||
/**
|
||||
* Create a new `ConnectionCache` with the given maximum (soft) size for
|
||||
* all items in the cache.
|
||||
*/
|
||||
constructor(max: number) {
|
||||
super(
|
||||
max,
|
||||
// Each handle is roughly the same size.
|
||||
() => 1,
|
||||
// Close the underlying file handle on cache eviction.
|
||||
connection => connection.close()
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoke `callback` with a SQLite connection object obtained from the
|
||||
* cache or created on cache miss. This connection is guaranteed not to
|
||||
* be disposed by cache eviction while the callback is active.
|
||||
*
|
||||
* @param database The database filename.
|
||||
* @param entities The set of expected entities present in this schema.
|
||||
* @param callback The function to invoke with the SQLite connection.
|
||||
*/
|
||||
public withConnection<T>(
|
||||
database: string,
|
||||
// Decorators are not possible to type check
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
entities: Function[],
|
||||
callback: (connection: Connection) => Promise<T>
|
||||
): Promise<T> {
|
||||
const factory = (): Promise<Connection> =>
|
||||
createConnection({
|
||||
database,
|
||||
entities,
|
||||
type: 'sqlite',
|
||||
name: database,
|
||||
synchronize: true,
|
||||
// logging: 'all',
|
||||
})
|
||||
|
||||
return this.withValue(database, factory, callback)
|
||||
}
|
||||
|
||||
/**
|
||||
* Like `withConnection`, but will open a transaction on the connection
|
||||
* before invoking the callback.
|
||||
*
|
||||
* @param database The database filename.
|
||||
* @param entities The set of expected entities present in this schema.
|
||||
* @param callback The function to invoke with the transaction's entity manager.
|
||||
* @param pragmaHook A function called with the connection before the transaction begins.
|
||||
*/
|
||||
public withTransactionalEntityManager<T>(
|
||||
database: string,
|
||||
// Decorators are not possible to type check
|
||||
// eslint-disable-next-line @typescript-eslint/ban-types
|
||||
entities: Function[],
|
||||
callback: (entityManager: EntityManager) => Promise<T>,
|
||||
pragmaHook?: (connection: Connection) => Promise<void>
|
||||
): Promise<T> {
|
||||
return this.withConnection(database, entities, async connection => {
|
||||
if (pragmaHook) {
|
||||
await pragmaHook(connection)
|
||||
}
|
||||
|
||||
return await connection.transaction(callback)
|
||||
})
|
||||
}
|
||||
}
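// Illustrative usage sketch (not part of the original change): run a raw query on a cached
// SQLite connection. The database path is hypothetical and the entity list is left empty
// for brevity.
async function exampleWithConnection(connectionCache: ConnectionCache): Promise<void> {
    await connectionCache.withConnection('lsif-storage/example.lsif.db', [], async connection => {
        console.log(await connection.query('PRAGMA journal_mode'))
    })
}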
|
||||
|
||||
/**
|
||||
* A wrapper around a cache value that retains its encoded size. In order to keep
|
||||
* decoded items within an in-memory limit, we use this value as the cache entry
|
||||
* size. This assumes that the size of the encoded text is a good proxy for the size
|
||||
* of the in-memory representation.
|
||||
*/
|
||||
export interface EncodedJsonCacheValue<T> {
|
||||
/**
|
||||
* The size of the encoded value.
|
||||
*/
|
||||
size: number
|
||||
|
||||
/**
|
||||
* The decoded value.
|
||||
*/
|
||||
data: T
|
||||
}
|
||||
|
||||
/**
|
||||
* A cache of decoded values encoded as JSON and gzipped in a SQLite database.
|
||||
*/
|
||||
class EncodedJsonCache<K, V> extends GenericCache<K, EncodedJsonCacheValue<V>> {
|
||||
/**
|
||||
* Create a new `EncodedJsonCache` with the given maximum (soft) size for
|
||||
* all items in the cache.
|
||||
*/
|
||||
constructor(max: number) {
|
||||
super(
|
||||
max,
|
||||
v => v.size,
|
||||
// Let GC handle the cleanup of the object on cache eviction.
|
||||
(): Promise<void> => Promise.resolve()
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A cache of deserialized `DocumentData` values indexed by a string containing
|
||||
* the database path and the path of the document.
|
||||
*/
|
||||
export class DocumentCache extends EncodedJsonCache<string, DocumentData> {}
|
||||
|
||||
/**
|
||||
* A cache of deserialized `ResultChunkData` values indexed by a string containing
|
||||
* the database path and the chunk index.
|
||||
*/
|
||||
export class ResultChunkCache extends EncodedJsonCache<string, ResultChunkData> {}
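A sketch of how these caches are fed (the loader below is a hypothetical stand-in; the real factories query a SQLite row and appear in database.ts later in this commit). The entry's `size` is the encoded byte length, so the soft limit approximates the memory footprint of the decoded values.

declare function loadEncodedDocumentRow(path: string): Promise<Buffer> // hypothetical stand-in for a SQLite query

const documentCache = new DocumentCache(1024 * 1024 * 100)

function getDocument(databasePath: string, path: string): Promise<DocumentData> {
    const factory = async (): Promise<EncodedJsonCacheValue<DocumentData>> => {
        const encoded = await loadEncodedDocumentRow(path)
        // gunzipJSON is defined in encoding.ts later in this change.
        return { size: encoded.length, data: await gunzipJSON<DocumentData>(encoded) }
    }

    return documentCache.withValue(`${databasePath}::${path}`, factory, value => Promise.resolve(value.data))
}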
|
||||
|
||||
/**
|
||||
* Return a promise and a function pair. The promise resolves once the function is called.
|
||||
*/
|
||||
export function createBarrierPromise(): { wait: Promise<void>; done: () => void } {
|
||||
let done!: () => void
|
||||
const wait = new Promise<void>(resolve => (done = resolve))
|
||||
return { wait, done }
|
||||
}
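A minimal sketch of the barrier in use: `wait` settles only after `done` has been invoked by some other task.

async function waitForSignal(): Promise<void> {
    const { wait, done } = createBarrierPromise()
    setTimeout(done, 100) // some other task eventually signals completion
    await wait // resolves once done() has run
}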
|
||||
279
lsif/src/correlator.test.ts
Normal file
279
lsif/src/correlator.test.ts
Normal file
@ -0,0 +1,279 @@
|
||||
import { normalizeHover, Correlator } from './correlator'
|
||||
import { ElementTypes, VertexLabels, EdgeLabels, MonikerKind } from 'lsif-protocol'
|
||||
|
||||
describe('Correlator', () => {
|
||||
it('should stash lsif version and project root from metadata', () => {
|
||||
const c = new Correlator()
|
||||
c.insert({
|
||||
id: '1',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.metaData,
|
||||
positionEncoding: 'utf-16',
|
||||
version: '0.4.3',
|
||||
projectRoot: 'file:///lsif-test',
|
||||
})
|
||||
|
||||
const projectRoot = c.projectRoot
|
||||
expect(c.lsifVersion).toEqual('0.4.3')
|
||||
expect(projectRoot && projectRoot.href).toEqual('file:///lsif-test')
|
||||
})
|
||||
|
||||
it('should require metadata vertex before document vertices', () => {
|
||||
const c = new Correlator()
|
||||
|
||||
expect(() => {
|
||||
c.insert({
|
||||
id: '1',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.document,
|
||||
uri: 'file:///lsif-test/index.ts',
|
||||
languageId: 'typescript',
|
||||
})
|
||||
}).toThrowError(new Error('No metadata defined.'))
|
||||
})
|
||||
|
||||
it('should find root-relative document paths', () => {
|
||||
const c = new Correlator()
|
||||
c.insert({
|
||||
id: '1',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.metaData,
|
||||
positionEncoding: 'utf-16',
|
||||
version: '0.4.3',
|
||||
projectRoot: 'file:///lsif-test',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '2',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.document,
|
||||
uri: 'file:///lsif-test/sub/path/index.ts',
|
||||
languageId: 'typescript',
|
||||
})
|
||||
|
||||
expect(c.documentPaths).toEqual(new Map([['2', 'sub/path/index.ts']]))
|
||||
})
|
||||
|
||||
it('should populate definition and reference results from item edges', () => {
|
||||
const c = new Correlator()
|
||||
c.insert({
|
||||
id: '1',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.metaData,
|
||||
positionEncoding: 'utf-16',
|
||||
version: '0.4.3',
|
||||
projectRoot: 'file:///lsif-test',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '2',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.document,
|
||||
uri: 'file:///lsif-test/sub/path/index.ts',
|
||||
languageId: 'typescript',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '3',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.range,
|
||||
start: { line: 3, character: 16 },
|
||||
end: { line: 3, character: 19 },
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '4',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.definitionResult,
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '5',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.referenceResult,
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '5',
|
||||
type: ElementTypes.edge,
|
||||
label: EdgeLabels.item,
|
||||
outV: '4',
|
||||
inVs: ['3'],
|
||||
document: '2',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '5',
|
||||
type: ElementTypes.edge,
|
||||
label: EdgeLabels.item,
|
||||
outV: '5',
|
||||
inVs: ['3'],
|
||||
document: '2',
|
||||
})
|
||||
|
||||
const defs = c.definitionData.get('4')
|
||||
expect(defs && defs.get('2')).toEqual(['3'])
|
||||
|
||||
const refs = c.referenceData.get('5')
|
||||
expect(refs && refs.get('2')).toEqual(['3'])
|
||||
})
|
||||
|
||||
it('should normalize hover results', () => {
|
||||
const c = new Correlator()
|
||||
c.insert({
|
||||
id: '1',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.hoverResult,
|
||||
result: {
|
||||
contents: {
|
||||
language: 'typescript',
|
||||
value: 'bar',
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(c.hoverData.get('1')).toEqual('```typescript\nbar\n```')
|
||||
})
|
||||
|
||||
it('should stash imported monikers', () => {
|
||||
const c = new Correlator()
|
||||
c.insert({
|
||||
id: '1',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.moniker,
|
||||
kind: MonikerKind.import,
|
||||
scheme: 'tsc',
|
||||
identifier: 'lsif-test:index:foo',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '2',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.packageInformation,
|
||||
manager: 'npm',
|
||||
name: 'dependency',
|
||||
version: '0.1.0',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '3',
|
||||
type: ElementTypes.edge,
|
||||
label: EdgeLabels.packageInformation,
|
||||
outV: '1',
|
||||
inV: '2',
|
||||
})
|
||||
|
||||
expect(c.importedMonikers).toEqual(new Set(['1']))
|
||||
})
|
||||
|
||||
it('should stash exported monikers', () => {
|
||||
const c = new Correlator()
|
||||
c.insert({
|
||||
id: '1',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.moniker,
|
||||
kind: MonikerKind.export,
|
||||
scheme: 'tsc',
|
||||
identifier: 'lsif-test:index:foo',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '2',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.packageInformation,
|
||||
manager: 'npm',
|
||||
name: 'dependency',
|
||||
version: '0.1.0',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '3',
|
||||
type: ElementTypes.edge,
|
||||
label: EdgeLabels.packageInformation,
|
||||
outV: '1',
|
||||
inV: '2',
|
||||
})
|
||||
|
||||
expect(c.exportedMonikers).toEqual(new Set(['1']))
|
||||
})
|
||||
|
||||
it('should correlate next monikers', () => {
|
||||
const c = new Correlator()
|
||||
c.insert({
|
||||
id: '1',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.range,
|
||||
start: { line: 3, character: 16 },
|
||||
end: { line: 3, character: 19 },
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '2',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.moniker,
|
||||
scheme: 'tsc',
|
||||
identifier: 'lsif-test:index:foo',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '3',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.moniker,
|
||||
scheme: 'npm',
|
||||
identifier: 'lsif-test:index:foo',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '4',
|
||||
type: ElementTypes.vertex,
|
||||
label: VertexLabels.moniker,
|
||||
scheme: 'super-npm',
|
||||
identifier: 'lsif-test:index:foo',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '5',
|
||||
type: ElementTypes.edge,
|
||||
label: EdgeLabels.moniker,
|
||||
outV: '1',
|
||||
inV: '2',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '6',
|
||||
type: ElementTypes.edge,
|
||||
label: EdgeLabels.nextMoniker,
|
||||
outV: '2',
|
||||
inV: '3',
|
||||
})
|
||||
|
||||
c.insert({
|
||||
id: '6',
|
||||
type: ElementTypes.edge,
|
||||
label: EdgeLabels.nextMoniker,
|
||||
outV: '3',
|
||||
inV: '4',
|
||||
})
|
||||
|
||||
const range = c.rangeData.get('1')
|
||||
expect(range && range.monikerIds).toEqual(new Set(['2']))
|
||||
expect(c.monikerSets.get('2')).toEqual(new Set(['3']))
|
||||
expect(c.monikerSets.get('3')).toEqual(new Set(['2', '4']))
|
||||
expect(c.monikerSets.get('4')).toEqual(new Set(['3']))
|
||||
})
|
||||
})
|
||||
|
||||
describe('normalizeHover', () => {
|
||||
it('should handle all lsp.Hover types', () => {
|
||||
expect(normalizeHover({ contents: 'foo' })).toEqual('foo')
|
||||
expect(normalizeHover({ contents: { language: 'typescript', value: 'bar' } })).toEqual(
|
||||
'```typescript\nbar\n```'
|
||||
)
|
||||
expect(normalizeHover({ contents: { kind: 'markdown', value: 'baz' } })).toEqual('baz')
|
||||
expect(
|
||||
normalizeHover({
|
||||
contents: ['foo', { language: 'typescript', value: 'bar' }],
|
||||
})
|
||||
).toEqual('foo\n\n---\n\n```typescript\nbar\n```')
|
||||
})
|
||||
})
|
||||
454
lsif/src/correlator.ts
Normal file
454
lsif/src/correlator.ts
Normal file
@ -0,0 +1,454 @@
|
||||
import RelateUrl from 'relateurl'
|
||||
import { mustGet, mustGetFromEither } from './util'
|
||||
import { DefaultMap } from './default-map'
|
||||
import { Hover, MarkupContent } from 'vscode-languageserver-types'
|
||||
import {
|
||||
MonikerData,
|
||||
PackageInformationData,
|
||||
RangeData,
|
||||
MonikerId,
|
||||
HoverResultId,
|
||||
ReferenceResultId,
|
||||
DefinitionResultId,
|
||||
DocumentId,
|
||||
PackageInformationId,
|
||||
} from './models.database'
|
||||
import {
|
||||
Id,
|
||||
VertexLabels,
|
||||
EdgeLabels,
|
||||
Vertex,
|
||||
Edge,
|
||||
MonikerKind,
|
||||
moniker,
|
||||
next,
|
||||
nextMoniker,
|
||||
textDocument_definition,
|
||||
textDocument_hover,
|
||||
textDocument_references,
|
||||
packageInformation,
|
||||
item,
|
||||
MetaData,
|
||||
ElementTypes,
|
||||
contains,
|
||||
RangeId,
|
||||
} from 'lsif-protocol'
|
||||
|
||||
/**
|
||||
* Identifiers of result set vertices.
|
||||
*/
|
||||
export type ResultSetId = Id
|
||||
|
||||
/**
|
||||
* An internal representation of a result set vertex. This is only used during
|
||||
* correlation and import as we flatten this data into the range vertices for
|
||||
* faster queries.
|
||||
*/
|
||||
export interface ResultSetData {
|
||||
/**
|
||||
* The identifier of the definition result attached to this result set.
|
||||
*/
|
||||
definitionResultId?: DefinitionResultId
|
||||
|
||||
/**
|
||||
* The identifier of the reference result attached to this result set.
|
||||
*/
|
||||
referenceResultId?: ReferenceResultId
|
||||
|
||||
/**
|
||||
* The identifier of the hover result attached to this result set.
|
||||
*/
|
||||
hoverResultId?: HoverResultId
|
||||
|
||||
/**
|
||||
* The set of moniker identifiers directly attached to this result set.
|
||||
*/
|
||||
monikerIds: Set<MonikerId>
|
||||
}
|
||||
|
||||
/**
|
||||
* Common state around the conversion of a single LSIF dump upload. This class
|
||||
* receives parsed vertices and edges, line by line, and adds them to an in-memory
|
||||
* adjacency-list graph structure that is later processed and converted into a
|
||||
* SQLite database on disk.
|
||||
*/
|
||||
export class Correlator {
|
||||
/**
|
||||
* The LSIF version of the input. This is extracted from the metadata vertex at
|
||||
* the beginning of processing.
|
||||
*/
|
||||
public lsifVersion?: string
|
||||
|
||||
/**
|
||||
* The root of all document URIs. This is extracted from the metadata vertex at
|
||||
* the beginning of processing.
|
||||
*/
|
||||
public projectRoot?: URL
|
||||
|
||||
// Vertex data
|
||||
public documentPaths = new Map<DocumentId, string>()
|
||||
public rangeData = new Map<RangeId, RangeData>()
|
||||
public resultSetData = new Map<ResultSetId, ResultSetData>()
|
||||
public hoverData = new Map<HoverResultId, string>()
|
||||
public monikerData = new Map<MonikerId, MonikerData>()
|
||||
public packageInformationData = new Map<PackageInformationId, PackageInformationData>()
|
||||
|
||||
// Edge data
|
||||
public nextData = new Map<RangeId | ResultSetId, ResultSetId>()
|
||||
public containsData = new Map<DocumentId, Set<RangeId>>()
|
||||
public definitionData = new Map<DefinitionResultId, DefaultMap<DocumentId, RangeId[]>>()
|
||||
public referenceData = new Map<ReferenceResultId, DefaultMap<DocumentId, RangeId[]>>()
|
||||
|
||||
/**
|
||||
* A mapping for the relation from moniker to the set of monikers that they are related
|
||||
* to via nextMoniker edges. This relation is symmetric such that if `a` is in
|
||||
* `monikerSets[b]`, then `b` is in `monikerSets[a]`.
|
||||
*/
|
||||
public monikerSets = new DefaultMap<RangeId, Set<MonikerId>>(() => new Set())
|
||||
|
||||
/**
|
||||
* The set of imported moniker identifiers that have package information attached.
|
||||
*/
|
||||
public importedMonikers = new Set<MonikerId>()
|
||||
|
||||
/**
|
||||
* The set of exported moniker identifiers that have package information attached.
|
||||
*/
|
||||
public exportedMonikers = new Set<MonikerId>()
|
||||
|
||||
/**
|
||||
* Process a single vertex or edge.
|
||||
*
|
||||
* @param element A vertex or edge element from the LSIF dump.
|
||||
*/
|
||||
public insert(element: Vertex | Edge): void {
|
||||
if (element.type === ElementTypes.vertex) {
|
||||
switch (element.label) {
|
||||
case VertexLabels.metaData:
|
||||
this.handleMetaData(element)
|
||||
break
|
||||
|
||||
case VertexLabels.document: {
|
||||
if (!this.projectRoot) {
|
||||
throw new Error('No metadata defined.')
|
||||
}
|
||||
|
||||
const path = RelateUrl.relate(this.projectRoot.href + '/', new URL(element.uri).href, {
|
||||
defaultPorts: {},
|
||||
output: RelateUrl.PATH_RELATIVE,
|
||||
removeRootTrailingSlash: false,
|
||||
})
|
||||
|
||||
this.documentPaths.set(element.id, path)
|
||||
this.containsData.set(element.id, new Set<RangeId>())
|
||||
break
|
||||
}
|
||||
|
||||
// The remaining vertex handlers stash data into an appropriate map. This data
|
||||
// may be retrieved when an edge that references it is seen, or when a document
|
||||
// is finalized.
|
||||
|
||||
case VertexLabels.range:
|
||||
this.rangeData.set(element.id, {
|
||||
startLine: element.start.line,
|
||||
startCharacter: element.start.character,
|
||||
endLine: element.end.line,
|
||||
endCharacter: element.end.character,
|
||||
monikerIds: new Set<MonikerId>(),
|
||||
})
|
||||
break
|
||||
|
||||
case VertexLabels.resultSet:
|
||||
this.resultSetData.set(element.id, { monikerIds: new Set<MonikerId>() })
|
||||
break
|
||||
|
||||
case VertexLabels.definitionResult:
|
||||
this.definitionData.set(element.id, new DefaultMap<DocumentId, RangeId[]>(() => []))
|
||||
break
|
||||
|
||||
case VertexLabels.referenceResult:
|
||||
this.referenceData.set(element.id, new DefaultMap<DocumentId, RangeId[]>(() => []))
|
||||
break
|
||||
|
||||
case VertexLabels.hoverResult:
|
||||
this.hoverData.set(element.id, normalizeHover(element.result))
|
||||
break
|
||||
|
||||
case VertexLabels.moniker:
|
||||
this.monikerData.set(element.id, {
|
||||
kind: element.kind || MonikerKind.local,
|
||||
scheme: element.scheme,
|
||||
identifier: element.identifier,
|
||||
})
|
||||
break
|
||||
|
||||
case VertexLabels.packageInformation:
|
||||
this.packageInformationData.set(element.id, {
|
||||
name: element.name,
|
||||
version: element.version || null,
|
||||
})
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if (element.type === ElementTypes.edge) {
|
||||
switch (element.label) {
|
||||
case EdgeLabels.contains:
|
||||
this.handleContains(element)
|
||||
break
|
||||
|
||||
case EdgeLabels.next:
|
||||
this.handleNextEdge(element)
|
||||
break
|
||||
|
||||
case EdgeLabels.item:
|
||||
this.handleItemEdge(element)
|
||||
break
|
||||
|
||||
case EdgeLabels.textDocument_definition:
|
||||
this.handleDefinitionEdge(element)
|
||||
break
|
||||
|
||||
case EdgeLabels.textDocument_references:
|
||||
this.handleReferenceEdge(element)
|
||||
break
|
||||
|
||||
case EdgeLabels.textDocument_hover:
|
||||
this.handleHoverEdge(element)
|
||||
break
|
||||
|
||||
case EdgeLabels.moniker:
|
||||
this.handleMonikerEdge(element)
|
||||
break
|
||||
|
||||
case EdgeLabels.nextMoniker:
|
||||
this.handleNextMonikerEdge(element)
|
||||
break
|
||||
|
||||
case EdgeLabels.packageInformation:
|
||||
this.handlePackageInformationEdge(element)
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// Vertex Handlers
|
||||
|
||||
/**
|
||||
* This should be the first vertex seen. Extract the project root so we
|
||||
* can create relative paths for documents and cache the LSIF protocol
|
||||
* version that we will later insert into the metadata table.
|
||||
*
|
||||
* @param vertex The metadata vertex.
|
||||
*/
|
||||
private handleMetaData(vertex: MetaData): void {
|
||||
this.lsifVersion = vertex.version
|
||||
this.projectRoot = new URL(vertex.projectRoot)
|
||||
}
|
||||
|
||||
//
|
||||
// Edge Handlers
|
||||
|
||||
/**
|
||||
* Add range data ids into the document in which they are contained. Ensures
|
||||
* all referenced vertices are defined.
|
||||
*
|
||||
* @param edge The contains edge.
|
||||
*/
|
||||
private handleContains(edge: contains): void {
|
||||
// Do not track project contains
|
||||
if (!this.documentPaths.has(edge.outV)) {
|
||||
return
|
||||
}
|
||||
|
||||
const set = mustGet(this.containsData, edge.outV, 'contains')
|
||||
for (const inV of edge.inVs) {
|
||||
mustGet(this.rangeData, inV, 'range')
|
||||
set.add(inV)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update definition and reference fields from an item edge. Ensures all
|
||||
* referenced vertices are defined.
|
||||
*
|
||||
* @param edge The item edge.
|
||||
*/
|
||||
private handleItemEdge(edge: item): void {
|
||||
if (this.definitionData.has(edge.outV)) {
|
||||
const documentMap = mustGet(this.definitionData, edge.outV, 'definitionResult')
|
||||
const rangeIds = documentMap.getOrDefault(edge.document)
|
||||
for (const inV of edge.inVs) {
|
||||
mustGet(this.rangeData, inV, 'range')
|
||||
rangeIds.push(inV)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if (this.referenceData.has(edge.outV)) {
|
||||
const documentMap = mustGet(this.referenceData, edge.outV, 'referenceResult')
|
||||
const rangeIds = documentMap.getOrDefault(edge.document)
|
||||
for (const inV of edge.inVs) {
|
||||
mustGet(this.rangeData, inV, 'range')
|
||||
rangeIds.push(inV)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
throw new Error(`Unknown definition or reference result ${edge.outV}.`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Attaches the specified moniker to the specified range or result set. Ensures all referenced
|
||||
* vertices are defined.
|
||||
*
|
||||
* @param edge The moniker edge.
|
||||
*/
|
||||
private handleMonikerEdge(edge: moniker): void {
|
||||
const source = mustGetFromEither<RangeId | ResultSetId, RangeData | ResultSetData>(
|
||||
this.rangeData,
|
||||
this.resultSetData,
|
||||
edge.outV,
|
||||
'range/resultSet'
|
||||
)
|
||||
|
||||
mustGet(this.monikerData, edge.inV, 'moniker')
|
||||
source.monikerIds = new Set<MonikerId>([edge.inV])
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the next field of the specified range or result set. Ensures all referenced vertices
|
||||
* are defined.
|
||||
*
|
||||
* @param edge The next edge.
|
||||
*/
|
||||
private handleNextEdge(edge: next): void {
|
||||
mustGetFromEither<RangeId | ResultSetId, RangeData | ResultSetData>(
|
||||
this.rangeData,
|
||||
this.resultSetData,
|
||||
edge.outV,
|
||||
'range/resultSet'
|
||||
)
|
||||
|
||||
mustGet(this.resultSetData, edge.inV, 'resultSet')
|
||||
this.nextData.set(edge.outV, edge.inV)
|
||||
}
|
||||
|
||||
/**
|
||||
* Correlates monikers together so that when one moniker is queried, each correlated moniker
|
||||
* is also returned as a strongly connected set. Ensures all referenced vertices are defined.
|
||||
*
|
||||
* @param edge The nextMoniker edge.
|
||||
*/
|
||||
private handleNextMonikerEdge(edge: nextMoniker): void {
|
||||
mustGet(this.monikerData, edge.inV, 'moniker')
|
||||
mustGet(this.monikerData, edge.outV, 'moniker')
|
||||
this.monikerSets.getOrDefault(edge.inV).add(edge.outV) // Forward direction
|
||||
this.monikerSets.getOrDefault(edge.outV).add(edge.inV) // Backwards direction
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the package information of the specified moniker. If the moniker is an export moniker,
|
||||
* then the package information will also be returned as an exported package by the `finalize`
|
||||
* method. Ensures all referenced vertices are defined.
|
||||
*
|
||||
* @param edge The packageInformation edge.
|
||||
*/
|
||||
private handlePackageInformationEdge(edge: packageInformation): void {
|
||||
const source = mustGet(this.monikerData, edge.outV, 'moniker')
|
||||
mustGet(this.packageInformationData, edge.inV, 'packageInformation')
|
||||
source.packageInformationId = edge.inV
|
||||
|
||||
if (source.kind === 'export') {
|
||||
this.exportedMonikers.add(edge.outV)
|
||||
}
|
||||
|
||||
if (source.kind === 'import') {
|
||||
this.importedMonikers.add(edge.outV)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the definition result of the specified range or result set. Ensures all referenced
|
||||
* vertices are defined.
|
||||
*
|
||||
* @param edge The textDocument/definition edge.
|
||||
*/
|
||||
private handleDefinitionEdge(edge: textDocument_definition): void {
|
||||
const outV = mustGetFromEither<RangeId | ResultSetId, RangeData | ResultSetData>(
|
||||
this.rangeData,
|
||||
this.resultSetData,
|
||||
edge.outV,
|
||||
'range/resultSet'
|
||||
)
|
||||
|
||||
mustGet(this.definitionData, edge.inV, 'definitionResult')
|
||||
outV.definitionResultId = edge.inV
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the hover result of the specified range or result set. Ensures all referenced
|
||||
* vertices are defined.
|
||||
*
|
||||
* @param edge The textDocument/hover edge.
|
||||
*/
|
||||
private handleHoverEdge(edge: textDocument_hover): void {
|
||||
const outV = mustGetFromEither<RangeId | ResultSetId, RangeData | ResultSetData>(
|
||||
this.rangeData,
|
||||
this.resultSetData,
|
||||
edge.outV,
|
||||
'range/resultSet'
|
||||
)
|
||||
|
||||
mustGet(this.hoverData, edge.inV, 'hoverResult')
|
||||
outV.hoverResultId = edge.inV
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the reference result of the specified range or result set. Ensures all
|
||||
* referenced vertices are defined.
|
||||
*
|
||||
* @param edge The textDocument/references edge.
|
||||
*/
|
||||
private handleReferenceEdge(edge: textDocument_references): void {
|
||||
const outV = mustGetFromEither<RangeId | ResultSetId, RangeData | ResultSetData>(
|
||||
this.rangeData,
|
||||
this.resultSetData,
|
||||
edge.outV,
|
||||
'range/resultSet'
|
||||
)
|
||||
|
||||
mustGet(this.referenceData, edge.inV, 'referenceResult')
|
||||
outV.referenceResultId = edge.inV
|
||||
}
|
||||
}
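A sketch of the driver loop assumed around this class (the real reader lives elsewhere in this change): the upload is a newline-delimited stream of vertices and edges, each handed to `insert` in order.

import * as fs from 'fs'
import * as readline from 'readline'

function correlate(dumpPath: string): Promise<Correlator> {
    const correlator = new Correlator()
    const reader = readline.createInterface({ input: fs.createReadStream(dumpPath) })

    return new Promise<Correlator>(resolve => {
        reader.on('line', line => correlator.insert(JSON.parse(line))) // one vertex or edge per line
        reader.on('close', () => resolve(correlator))
    })
}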
|
||||
|
||||
/**
|
||||
* Normalize an LSP hover object into a string.
|
||||
*
|
||||
* @param hover The hover object.
|
||||
*/
|
||||
export function normalizeHover(hover: Hover): string {
|
||||
const normalizeContent = (content: string | MarkupContent | { language: string; value: string }): string => {
|
||||
if (typeof content === 'string') {
|
||||
return content
|
||||
}
|
||||
|
||||
if (MarkupContent.is(content)) {
|
||||
return content.value
|
||||
}
|
||||
|
||||
const tick = '```'
|
||||
return `${tick}${content.language}\n${content.value}\n${tick}`
|
||||
}
|
||||
|
||||
const separator = '\n\n---\n\n'
|
||||
const contents = Array.isArray(hover.contents) ? hover.contents : [hover.contents]
|
||||
return contents
|
||||
.map(c => normalizeContent(c).trim())
|
||||
.filter(s => s)
|
||||
.join(separator)
|
||||
}
|
||||
78
lsif/src/database.test.ts
Normal file
78
lsif/src/database.test.ts
Normal file
@ -0,0 +1,78 @@
|
||||
import * as lsp from 'vscode-languageserver-protocol'
|
||||
import { comparePosition, createRemoteUri, mapRangesToLocations } from './database'
|
||||
import { MonikerId, RangeData, RangeId } from './models.database'
|
||||
|
||||
describe('comparePosition', () => {
|
||||
it('should return the relative order to a range', () => {
|
||||
const range = {
|
||||
startLine: 5,
|
||||
startCharacter: 11,
|
||||
endLine: 5,
|
||||
endCharacter: 13,
|
||||
monikerIds: new Set<MonikerId>(),
|
||||
}
|
||||
|
||||
expect(comparePosition(range, { line: 5, character: 11 })).toEqual(0)
|
||||
expect(comparePosition(range, { line: 5, character: 12 })).toEqual(0)
|
||||
expect(comparePosition(range, { line: 5, character: 13 })).toEqual(0)
|
||||
expect(comparePosition(range, { line: 4, character: 12 })).toEqual(+1)
|
||||
expect(comparePosition(range, { line: 5, character: 10 })).toEqual(+1)
|
||||
expect(comparePosition(range, { line: 5, character: 14 })).toEqual(-1)
|
||||
expect(comparePosition(range, { line: 6, character: 12 })).toEqual(-1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('createRemoteUri', () => {
|
||||
it('should generate a URI to another project', () => {
|
||||
const pkg = {
|
||||
id: 0,
|
||||
scheme: '',
|
||||
name: '',
|
||||
version: '',
|
||||
repository: 'github.com/sourcegraph/codeintellify',
|
||||
commit: 'deadbeef',
|
||||
}
|
||||
|
||||
const uri = createRemoteUri(pkg, 'src/position.ts')
|
||||
expect(uri).toEqual('git://github.com/sourcegraph/codeintellify?deadbeef#src/position.ts')
|
||||
})
|
||||
})
|
||||
|
||||
describe('mapRangesToLocations', () => {
|
||||
it('should map ranges to locations', () => {
|
||||
const ranges = new Map<RangeId, RangeData>()
|
||||
ranges.set(1, {
|
||||
startLine: 1,
|
||||
startCharacter: 1,
|
||||
endLine: 1,
|
||||
endCharacter: 2,
|
||||
monikerIds: new Set<MonikerId>(),
|
||||
})
|
||||
ranges.set(2, {
|
||||
startLine: 3,
|
||||
startCharacter: 1,
|
||||
endLine: 3,
|
||||
endCharacter: 2,
|
||||
monikerIds: new Set<MonikerId>(),
|
||||
})
|
||||
ranges.set(4, {
|
||||
startLine: 2,
|
||||
startCharacter: 1,
|
||||
endLine: 2,
|
||||
endCharacter: 2,
|
||||
monikerIds: new Set<MonikerId>(),
|
||||
})
|
||||
|
||||
const locations = mapRangesToLocations(ranges, 'src/position.ts', new Set([1, 2, 4]))
|
||||
expect(locations).toContainEqual(
|
||||
lsp.Location.create('src/position.ts', { start: { line: 1, character: 1 }, end: { line: 1, character: 2 } })
|
||||
)
|
||||
expect(locations).toContainEqual(
|
||||
lsp.Location.create('src/position.ts', { start: { line: 3, character: 1 }, end: { line: 3, character: 2 } })
|
||||
)
|
||||
expect(locations).toContainEqual(
|
||||
lsp.Location.create('src/position.ts', { start: { line: 2, character: 1 }, end: { line: 2, character: 2 } })
|
||||
)
|
||||
expect(locations).toHaveLength(3)
|
||||
})
|
||||
})
|
||||
659
lsif/src/database.ts
Normal file
659
lsif/src/database.ts
Normal file
@ -0,0 +1,659 @@
|
||||
import * as lsp from 'vscode-languageserver-protocol'
|
||||
import { mustGet, hashKey } from './util'
|
||||
import { Connection } from 'typeorm'
|
||||
import { ConnectionCache, DocumentCache, EncodedJsonCacheValue, ResultChunkCache } from './cache'
|
||||
import { gunzipJSON } from './encoding'
|
||||
import { DefaultMap } from './default-map'
|
||||
import {
|
||||
DefinitionModel,
|
||||
DocumentData,
|
||||
DocumentModel,
|
||||
MetaModel,
|
||||
MonikerData,
|
||||
RangeData,
|
||||
ReferenceModel,
|
||||
ResultChunkData,
|
||||
ResultChunkModel,
|
||||
DocumentPathRangeId,
|
||||
DefinitionReferenceResultId,
|
||||
RangeId,
|
||||
} from './models.database'
|
||||
import { isEqual, uniqWith } from 'lodash'
|
||||
import { makeFilename } from './backend'
|
||||
import { PackageModel } from './models.xrepo'
|
||||
import { XrepoDatabase } from './xrepo'
|
||||
|
||||
/**
|
||||
* A wrapper around operations for single repository/commit pair.
|
||||
*/
|
||||
export class Database {
|
||||
/**
|
||||
* A static map of database paths to the `numResultChunks` value of their
|
||||
* metadata row. This map is populated lazily as the values are needed.
|
||||
*/
|
||||
private static numResultChunks = new Map<string, number>()
|
||||
|
||||
/**
|
||||
* Create a new `Database` with the given cross-repo database instance and the
|
||||
* filename of the database that contains data for a particular repository/commit.
|
||||
*
|
||||
* @param storageRoot The path where SQLite databases are stored.
|
||||
* @param xrepoDatabase The cross-repo database.
|
||||
* @param connectionCache The cache of SQLite connections.
|
||||
* @param documentCache The cache of loaded documents.
|
||||
* @param resultChunkCache The cache of loaded result chunks.
|
||||
* @param repository The repository for which this database answers queries.
|
||||
* @param commit The commit for which this database answers queries.
|
||||
* @param databasePath The path to the database file.
|
||||
*/
|
||||
constructor(
|
||||
private storageRoot: string,
|
||||
private xrepoDatabase: XrepoDatabase,
|
||||
private connectionCache: ConnectionCache,
|
||||
private documentCache: DocumentCache,
|
||||
private resultChunkCache: ResultChunkCache,
|
||||
private repository: string,
|
||||
private commit: string,
|
||||
private databasePath: string
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Determine if data exists for a particular document in this database.
|
||||
*
|
||||
* @param path The path of the document.
|
||||
*/
|
||||
public async exists(path: string): Promise<boolean> {
|
||||
return (await this.getDocumentByPath(path)) !== undefined
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the location for the definition of the reference at the given position.
|
||||
*
|
||||
* @param path The path of the document to which the position belongs.
|
||||
* @param position The current hover position.
|
||||
*/
|
||||
public async definitions(path: string, position: lsp.Position): Promise<lsp.Location[]> {
|
||||
const { document, range } = await this.getRangeByPosition(path, position)
|
||||
if (!document || !range) {
|
||||
return []
|
||||
}
|
||||
|
||||
// First, we try to find the definition result attached to the range or one
|
||||
// of the result sets to which the range is attached.
|
||||
|
||||
if (range.definitionResultId) {
|
||||
// We have a definition result in this database.
|
||||
const definitionResults = await this.getResultById(range.definitionResultId)
|
||||
|
||||
// TODO - due to some bugs in tsc... this fixes the tests and some TypeScript examples.
|
||||
// Not sure of a better way to do this right now until we work through how to patch
|
||||
// lsif-tsc to handle node_modules inclusion (or somehow blacklist it on import).
|
||||
|
||||
if (!definitionResults.some(v => v.documentPath.includes('node_modules'))) {
|
||||
return await this.convertRangesToLspLocations(path, document, definitionResults)
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, we fall back to a moniker search. We get all the monikers attached
|
||||
// to the range or a result set to which the range is attached. We process each
|
||||
// moniker sequentially in order of priority, where import monikers, if any exist,
|
||||
// will be processed first.
|
||||
|
||||
for (const moniker of sortMonikers(
|
||||
Array.from(range.monikerIds).map(id => mustGet(document.monikers, id, 'moniker'))
|
||||
)) {
|
||||
if (moniker.kind === 'import') {
|
||||
// This symbol was imported from another database. See if we have xrepo
|
||||
// definition for it.
|
||||
|
||||
const remoteDefinitions = await this.remoteDefinitions(document, moniker)
|
||||
if (remoteDefinitions) {
|
||||
return remoteDefinitions
|
||||
}
|
||||
} else {
|
||||
// This symbol was not imported from another database. We search the definitions
|
||||
// table of our own database in case there was a definition that wasn't properly
|
||||
// attached to a result set but did have the correct monikers attached.
|
||||
|
||||
const localDefinitions = await Database.monikerResults(this, DefinitionModel, moniker, path => path)
|
||||
if (localDefinitions) {
|
||||
return localDefinitions
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return []
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a list of locations which reference the definition at the given position.
|
||||
*
|
||||
* @param path The path of the document to which the position belongs.
|
||||
* @param position The current hover position.
|
||||
*/
|
||||
public async references(path: string, position: lsp.Position): Promise<lsp.Location[]> {
|
||||
const { document, range } = await this.getRangeByPosition(path, position)
|
||||
if (!document || !range) {
|
||||
return []
|
||||
}
|
||||
|
||||
let locations: lsp.Location[] = []
|
||||
|
||||
// First, we try to find the reference result attached to the range or one
|
||||
// of the result sets to which the range is attached.
|
||||
|
||||
if (range.referenceResultId) {
|
||||
// We have references in this database.
|
||||
locations = locations.concat(
|
||||
await this.convertRangesToLspLocations(
|
||||
path,
|
||||
document,
|
||||
await this.getResultById(range.referenceResultId)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Next, we do a moniker search in two stages, described below. We process each
|
||||
// moniker sequentially in order of priority for each stage, where import monikers,
|
||||
// if any exist, will be processed first.
|
||||
|
||||
const monikers = sortMonikers(
|
||||
Array.from(range.monikerIds).map(id => mustGet(document.monikers, id, 'monikers'))
|
||||
)
|
||||
|
||||
// Next, we search the references table of our own database. This search is necessary but
// may seem unintuitive: remember that a 'Find References' operation on a reference should
// also return references to the definition. These are not necessarily fully linked
|
||||
// in the LSIF data.
|
||||
|
||||
for (const moniker of monikers) {
|
||||
locations = locations.concat(await Database.monikerResults(this, ReferenceModel, moniker, path => path))
|
||||
}
|
||||
|
||||
// Next, we perform an xrepo search for uses of each nonlocal moniker. We stop processing after
|
||||
// the first moniker for which we received results. As we process monikers in an order that
|
||||
// considers moniker schemes, the first one to get results should be the most desirable.
|
||||
|
||||
for (const moniker of monikers) {
|
||||
if (moniker.kind === 'import') {
|
||||
// Get locations in the defining package
|
||||
locations = locations.concat(await this.remoteMoniker(document, moniker))
|
||||
}
|
||||
|
||||
// Get locations in all packages
|
||||
const remoteResults = await this.remoteReferences(document, moniker)
|
||||
if (remoteResults) {
|
||||
// TODO - determine source of duplication (and below)
|
||||
return uniqWith(locations.concat(remoteResults), isEqual)
|
||||
}
|
||||
}
|
||||
|
||||
return uniqWith(locations, isEqual)
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the hover content for the definition or reference at the given position.
|
||||
*
|
||||
* @param path The path of the document to which the position belongs.
|
||||
* @param position The current hover position.
|
||||
*/
|
||||
public async hover(path: string, position: lsp.Position): Promise<lsp.Hover | null> {
|
||||
const { document, range } = await this.getRangeByPosition(path, position)
|
||||
if (!document || !range) {
|
||||
return null
|
||||
}
|
||||
|
||||
// Try to find the hover content attached to the range or one of the result sets to
|
||||
// which the range is attached. There is no fall-back search via monikers for this
|
||||
// operation.
|
||||
|
||||
if (range.hoverResultId) {
|
||||
return {
|
||||
contents: {
|
||||
kind: lsp.MarkupKind.Markdown,
|
||||
value: mustGet(document.hoverResults, range.hoverResultId, 'hoverResult'),
|
||||
},
|
||||
range: createRange(range),
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
//
|
||||
// Helper Functions
|
||||
|
||||
/**
|
||||
* Convert a set of range-document pairs (from a definition or reference query) into
|
||||
* a set of LSP locations. Each pair holds the range identifier as well as the document
|
||||
* path. For document paths matching the loaded document, find the range data locally.
|
||||
* For all other paths, find the document in this database and find the range in that
|
||||
* document.
|
||||
*
|
||||
* @param path The path of the document for this query.
|
||||
* @param document The document object for this query.
|
||||
* @param resultData A list of range ids and the document they belong to.
|
||||
*/
|
||||
private async convertRangesToLspLocations(
|
||||
path: string,
|
||||
document: DocumentData,
|
||||
resultData: DocumentPathRangeId[]
|
||||
): Promise<lsp.Location[]> {
|
||||
// Group by document path so we only have to load each document once
|
||||
const groupedResults = new DefaultMap<string, Set<RangeId>>(() => new Set())
|
||||
|
||||
for (const { documentPath, rangeId } of resultData) {
|
||||
groupedResults.getOrDefault(documentPath).add(rangeId)
|
||||
}
|
||||
|
||||
let results: lsp.Location[] = []
|
||||
for (const [documentPath, rangeIds] of groupedResults) {
|
||||
if (documentPath === path) {
|
||||
// If the document path is this document, convert the locations directly
|
||||
results = results.concat(mapRangesToLocations(document.ranges, path, rangeIds))
|
||||
continue
|
||||
}
|
||||
|
||||
// Otherwise, we need to get the correct document
|
||||
const sibling = await this.getDocumentByPath(documentPath)
|
||||
if (!sibling) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Then finally convert the locations in the sibling document
|
||||
results = results.concat(mapRangesToLocations(sibling.ranges, documentPath, rangeIds))
|
||||
}
|
||||
|
||||
return results
|
||||
}
|
||||
|
||||
/**
|
||||
* Query the definitions or references table of `db` for items that match the given moniker.
|
||||
* Convert each result into an LSP location. The `pathTransformer` function is invoked on each
|
||||
* result item to modify the resulting locations.
|
||||
*
|
||||
* @param db The target database.
|
||||
* @param model The constructor for the model type.
|
||||
* @param moniker The target moniker.
|
||||
* @param pathTransformer The function used to alter location paths.
|
||||
*/
|
||||
private static async monikerResults(
|
||||
db: Database,
|
||||
model: typeof DefinitionModel | typeof ReferenceModel,
|
||||
moniker: MonikerData,
|
||||
pathTransformer: (path: string) => string
|
||||
): Promise<lsp.Location[]> {
|
||||
const results = await db.withConnection(connection =>
|
||||
connection.getRepository<DefinitionModel | ReferenceModel>(model).find({
|
||||
where: {
|
||||
scheme: moniker.scheme,
|
||||
identifier: moniker.identifier,
|
||||
},
|
||||
})
|
||||
)
|
||||
|
||||
return results.map(result => lsp.Location.create(pathTransformer(result.documentPath), createRange(result)))
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the definition of the target moniker outside of the current database. If the
|
||||
* moniker has attached package information, then the xrepo database is queried for
|
||||
* the target package. That database is opened, and its definitions table is queried for the
|
||||
* target moniker.
|
||||
*
|
||||
* @param document The document containing the reference.
|
||||
* @param moniker The target moniker.
|
||||
*/
|
||||
private async remoteDefinitions(document: DocumentData, moniker: MonikerData): Promise<lsp.Location[] | null> {
|
||||
if (!moniker.packageInformationId) {
|
||||
return null
|
||||
}
|
||||
|
||||
const packageInformation = document.packageInformation.get(moniker.packageInformationId)
|
||||
if (!packageInformation) {
|
||||
return null
|
||||
}
|
||||
|
||||
const packageEntity = await this.xrepoDatabase.getPackage(
|
||||
moniker.scheme,
|
||||
packageInformation.name,
|
||||
packageInformation.version
|
||||
)
|
||||
|
||||
if (!packageEntity) {
|
||||
return null
|
||||
}
|
||||
|
||||
const db = this.createNewDatabase(
|
||||
packageEntity.repository,
|
||||
packageEntity.commit,
|
||||
makeFilename(this.storageRoot, packageEntity.repository, packageEntity.commit)
|
||||
)
|
||||
|
||||
const pathTransformer = (path: string): string => createRemoteUri(packageEntity, path)
|
||||
return await Database.monikerResults(db, DefinitionModel, moniker, pathTransformer)
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the references of the target moniker inside the database where that moniker is defined.
|
||||
*
|
||||
* @param document The document containing the definition.
|
||||
* @param moniker The target moniker.
|
||||
*/
|
||||
private async remoteMoniker(document: DocumentData, moniker: MonikerData): Promise<lsp.Location[]> {
|
||||
if (!moniker.packageInformationId) {
|
||||
return []
|
||||
}
|
||||
|
||||
const packageInformation = document.packageInformation.get(moniker.packageInformationId)
|
||||
if (!packageInformation) {
|
||||
return []
|
||||
}
|
||||
|
||||
const packageEntity = await this.xrepoDatabase.getPackage(
|
||||
moniker.scheme,
|
||||
packageInformation.name,
|
||||
packageInformation.version
|
||||
)
|
||||
|
||||
if (!packageEntity) {
|
||||
return []
|
||||
}
|
||||
|
||||
const db = this.createNewDatabase(
|
||||
packageEntity.repository,
|
||||
packageEntity.commit,
|
||||
makeFilename(this.storageRoot, packageEntity.repository, packageEntity.commit)
|
||||
)
|
||||
|
||||
const pathTransformer = (path: string): string => createRemoteUri(packageEntity, path)
|
||||
return await Database.monikerResults(db, ReferenceModel, moniker, pathTransformer)
|
||||
}
|
||||
|
||||
/**
|
||||
* Find the references of the target moniker outside of the current database. If the moniker
|
||||
* has attached package information, then the xrepo database is queried for the packages that
|
||||
* require this particular moniker identifier. These databases are opened, and their references tables
|
||||
* are queried for the target moniker.
|
||||
*
|
||||
* @param document The document containing the definition.
|
||||
* @param moniker The target moniker.
|
||||
*/
|
||||
private async remoteReferences(document: DocumentData, moniker: MonikerData): Promise<lsp.Location[]> {
|
||||
if (!moniker.packageInformationId) {
|
||||
return []
|
||||
}
|
||||
|
||||
const packageInformation = document.packageInformation.get(moniker.packageInformationId)
|
||||
if (!packageInformation) {
|
||||
return []
|
||||
}
|
||||
|
||||
const references = await this.xrepoDatabase.getReferences({
|
||||
scheme: moniker.scheme,
|
||||
name: packageInformation.name,
|
||||
version: packageInformation.version,
|
||||
value: moniker.identifier,
|
||||
})
|
||||
|
||||
let allReferences: lsp.Location[] = []
|
||||
for (const reference of references) {
|
||||
// Skip the remote references that show up for ourselves - we've already gathered
|
||||
// these in the previous step of the references query.
|
||||
if (reference.repository === this.repository && reference.commit === this.commit) {
|
||||
continue
|
||||
}
|
||||
|
||||
const db = this.createNewDatabase(
|
||||
reference.repository,
|
||||
reference.commit,
|
||||
makeFilename(this.storageRoot, reference.repository, reference.commit)
|
||||
)
|
||||
|
||||
const pathTransformer = (path: string): string => createRemoteUri(reference, path)
|
||||
const referenceLocations = await Database.monikerResults(db, ReferenceModel, moniker, pathTransformer)
allReferences = allReferences.concat(referenceLocations)
|
||||
}
|
||||
|
||||
return allReferences
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a parsed document that describes the given path. The result of this
|
||||
* method is cached across all database instances.
|
||||
*
|
||||
* @param path The path of the document.
|
||||
*/
|
||||
private async getDocumentByPath(path: string): Promise<DocumentData | undefined> {
|
||||
const factory = async (): Promise<EncodedJsonCacheValue<DocumentData>> => {
|
||||
const document = await this.withConnection(connection =>
|
||||
connection.getRepository(DocumentModel).findOneOrFail(path)
|
||||
)
|
||||
|
||||
return {
|
||||
size: document.data.length,
|
||||
data: await gunzipJSON<DocumentData>(document.data),
|
||||
}
|
||||
}
|
||||
|
||||
return await this.documentCache.withValue(`${this.databasePath}::${path}`, factory, document =>
|
||||
Promise.resolve(document.data)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a parsed document that describes the given path as well as the range
|
||||
* from that document that contains the given position. Returns undefined for
|
||||
* both values if one cannot be loaded.
|
||||
*
|
||||
* @param path The path of the document.
|
||||
* @param position The user's hover position.
|
||||
*/
|
||||
private async getRangeByPosition(
|
||||
path: string,
|
||||
position: lsp.Position
|
||||
): Promise<{ document: DocumentData | undefined; range: RangeData | undefined }> {
|
||||
const document = await this.getDocumentByPath(path)
|
||||
if (!document) {
|
||||
return { document: undefined, range: undefined }
|
||||
}
|
||||
|
||||
for (const range of document.ranges.values()) {
|
||||
if (comparePosition(range, position) === 0) {
|
||||
return { document, range }
|
||||
}
|
||||
}
|
||||
|
||||
return { document: undefined, range: undefined }
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a list of ranges with document ids into a list of ranges with
|
||||
* document paths by looking into the result chunks table and parsing the
|
||||
* data associated with the given identifier.
|
||||
*
|
||||
* @param id The identifier of the definition or reference result.
|
||||
*/
|
||||
private async getResultById(id: DefinitionReferenceResultId): Promise<DocumentPathRangeId[]> {
|
||||
const { documentPaths, documentIdRangeIds } = await this.getResultChunkByResultId(id)
|
||||
const ranges = mustGet(documentIdRangeIds, id, 'documentIdRangeId')
|
||||
|
||||
return ranges.map(range => ({
|
||||
documentPath: mustGet(documentPaths, range.documentId, 'documentPath'),
|
||||
rangeId: range.rangeId,
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a parsed result chunk that contains the given identifier.
|
||||
*
|
||||
* @param id An identifier contained in the result chunk.
|
||||
*/
|
||||
private async getResultChunkByResultId(id: DefinitionReferenceResultId): Promise<ResultChunkData> {
|
||||
// Find the result chunk index this id belongs to
|
||||
const index = hashKey(id, await this.getNumResultChunks())
|
||||
|
||||
const factory = async (): Promise<EncodedJsonCacheValue<ResultChunkData>> => {
|
||||
const resultChunk = await this.withConnection(connection =>
|
||||
connection.getRepository(ResultChunkModel).findOneOrFail(index)
|
||||
)
|
||||
|
||||
return {
|
||||
size: resultChunk.data.length,
|
||||
data: await gunzipJSON<ResultChunkData>(resultChunk.data),
|
||||
}
|
||||
}
|
||||
|
||||
return await this.resultChunkCache.withValue(`${this.databasePath}::${index}`, factory, resultChunk =>
|
||||
Promise.resolve(resultChunk.data)
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the `numResultChunks` value from this database's metadata row.
|
||||
*/
|
||||
private async getNumResultChunks(): Promise<number> {
|
||||
const numResultChunks = Database.numResultChunks.get(this.databasePath)
|
||||
if (numResultChunks !== undefined) {
|
||||
return numResultChunks
|
||||
}
|
||||
|
||||
// Not in the shared map, need to query it
|
||||
const meta = await this.withConnection(connection => connection.getRepository(MetaModel).findOneOrFail(1))
|
||||
Database.numResultChunks.set(this.databasePath, meta.numResultChunks)
|
||||
return meta.numResultChunks
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new database with the same configuration but a different repository,
|
||||
* commit, and databasePath.
|
||||
*
|
||||
* @param repository The repository for which this database answers queries.
|
||||
* @param commit The commit for which this database answers queries.
|
||||
* @param databasePath The path to the database file.
|
||||
*/
|
||||
private createNewDatabase(repository: string, commit: string, databasePath: string): Database {
|
||||
return new Database(
|
||||
this.storageRoot,
|
||||
this.xrepoDatabase,
|
||||
this.connectionCache,
|
||||
this.documentCache,
|
||||
this.resultChunkCache,
|
||||
repository,
|
||||
commit,
|
||||
databasePath
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoke `callback` with a SQLite connection object obtained from the
|
||||
* cache or created on cache miss.
|
||||
*
|
||||
* @param callback The function invoked with the SQLite connection.
|
||||
*/
|
||||
private async withConnection<T>(callback: (connection: Connection) => Promise<T>): Promise<T> {
|
||||
return await this.connectionCache.withConnection(
|
||||
this.databasePath,
|
||||
[DefinitionModel, DocumentModel, MetaModel, ReferenceModel, ResultChunkModel],
|
||||
callback
|
||||
)
|
||||
}
|
||||
}
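A sketch of how the calling layer wires up a `Database` (the cache sizes here are illustrative; `makeFilename` and `XrepoDatabase` are the imports at the top of this file):

const connectionCache = new ConnectionCache(100)
const documentCache = new DocumentCache(1024 * 1024 * 100)
const resultChunkCache = new ResultChunkCache(1024 * 1024 * 100)

function createDatabase(storageRoot: string, xrepoDatabase: XrepoDatabase, repository: string, commit: string): Database {
    return new Database(
        storageRoot,
        xrepoDatabase,
        connectionCache,
        documentCache,
        resultChunkCache,
        repository,
        commit,
        makeFilename(storageRoot, repository, commit)
    )
}

// Illustrative query: createDatabase(root, xrepo, 'github.com/foo/bar', 'deadbeef').hover('src/index.ts', { line: 3, character: 16 })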
|
||||
|
||||
/**
|
||||
* Compare a position against a range. Returns 0 if the position occurs
|
||||
* within the range (inclusive bounds), -1 if the position occurs after
|
||||
* it, and +1 if the position occurs before it.
|
||||
*
|
||||
* @param range The range.
|
||||
* @param position The position.
|
||||
*/
|
||||
export function comparePosition(range: RangeData, position: lsp.Position): number {
|
||||
if (position.line < range.startLine) {
|
||||
return +1
|
||||
}
|
||||
|
||||
if (position.line > range.endLine) {
|
||||
return -1
|
||||
}
|
||||
|
||||
if (position.line === range.startLine && position.character < range.startCharacter) {
|
||||
return +1
|
||||
}
|
||||
|
||||
if (position.line === range.endLine && position.character > range.endCharacter) {
|
||||
return -1
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
/**
|
||||
* Sort the monikers by kind, then scheme in order of the following
|
||||
* preferences.
|
||||
*
|
||||
* - kind: import, local, export
|
||||
* - scheme: npm, tsc
|
||||
*
|
||||
* @param monikers The list of monikers.
|
||||
*/
|
||||
export function sortMonikers(monikers: MonikerData[]): MonikerData[] {
|
||||
const monikerKindPreferences = ['import', 'local', 'export']
|
||||
const monikerSchemePreferences = ['npm', 'tsc']
|
||||
|
||||
monikers.sort((a, b) => {
|
||||
const ord = monikerKindPreferences.indexOf(a.kind) - monikerKindPreferences.indexOf(b.kind)
|
||||
if (ord !== 0) {
|
||||
return ord
|
||||
}
|
||||
|
||||
return monikerSchemePreferences.indexOf(a.scheme) - monikerSchemePreferences.indexOf(b.scheme)
|
||||
})
|
||||
|
||||
return monikers
|
||||
}
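For example, under the preferences above an imported npm moniker sorts ahead of an exported tsc moniker (sketch; `MonikerKind` is the enum from lsif-protocol):

import { MonikerKind } from 'lsif-protocol'

const ordered = sortMonikers([
    { kind: MonikerKind.export, scheme: 'tsc', identifier: 'lsif-test:index:foo' },
    { kind: MonikerKind.import, scheme: 'npm', identifier: 'lsif-test:index:foo' },
])
// ordered[0] is the npm import moniker, ordered[1] the tsc export moniker.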
|
||||
|
||||
/**
|
||||
* Construct a URI that can be used by the frontend to switch to another
|
||||
* repository.
|
||||
*
|
||||
* @param pkg The target package.
|
||||
* @param path The path relative to the project root.
|
||||
*/
|
||||
export function createRemoteUri(pkg: PackageModel, path: string): string {
|
||||
const url = new URL(`git://${pkg.repository}`)
|
||||
url.search = pkg.commit
|
||||
url.hash = path
|
||||
return url.href
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct an LSP range from a flat range.
|
||||
*
|
||||
* @param result The start/end line/character of the range.
|
||||
*/
|
||||
function createRange(result: {
|
||||
startLine: number
|
||||
startCharacter: number
|
||||
endLine: number
|
||||
endCharacter: number
|
||||
}): lsp.Range {
|
||||
return lsp.Range.create(result.startLine, result.startCharacter, result.endLine, result.endCharacter)
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert the given range identifiers into LSP location objects.
|
||||
*
|
||||
* @param ranges The map of ranges of the document.
|
||||
* @param uri The location URI.
|
||||
* @param ids The set of range identifiers for each resulting location.
|
||||
*/
|
||||
export function mapRangesToLocations(ranges: Map<RangeId, RangeData>, uri: string, ids: Set<RangeId>): lsp.Location[] {
|
||||
const locations = []
|
||||
for (const id of ids) {
|
||||
locations.push(lsp.Location.create(uri, createRange(mustGet(ranges, id, 'range'))))
|
||||
}
|
||||
|
||||
return locations
|
||||
}
|
||||
36
lsif/src/default-map.test.ts
Normal file
36
lsif/src/default-map.test.ts
Normal file
@ -0,0 +1,36 @@
|
||||
import { DefaultMap } from './default-map'
|
||||
|
||||
describe('DefaultMap', () => {
|
||||
it('should leave get unchanged', () => {
|
||||
const map = new DefaultMap<string, string>(() => 'bar')
|
||||
expect(map.get('foo')).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should create values on access', () => {
|
||||
const map = new DefaultMap<string, string>(() => 'bar')
|
||||
expect(map.getOrDefault('foo')).toEqual('bar')
|
||||
})
|
||||
|
||||
it('should respect explicit set', () => {
|
||||
const map = new DefaultMap<string, string>(() => 'bar')
|
||||
map.set('foo', 'baz')
|
||||
expect(map.getOrDefault('foo')).toEqual('baz')
|
||||
})
|
||||
|
||||
it('should support nested gets', () => {
|
||||
const map = new DefaultMap<string, DefaultMap<string, string[]>>(
|
||||
() => new DefaultMap<string, string[]>(() => [])
|
||||
)
|
||||
|
||||
map.getOrDefault('foo')
|
||||
.getOrDefault('bar')
|
||||
.push('baz')
|
||||
|
||||
map.getOrDefault('foo')
|
||||
.getOrDefault('bar')
|
||||
.push('bonk')
|
||||
|
||||
const inner = map.get('foo')
|
||||
expect(inner && inner.get('bar')).toEqual(['baz', 'bonk'])
|
||||
})
|
||||
})
|
||||
32
lsif/src/default-map.ts
Normal file
32
lsif/src/default-map.ts
Normal file
@ -0,0 +1,32 @@
|
||||
/**
|
||||
* An extension of `Map` that defines `getOrDefault` for a type of stunted
|
||||
* autovivification. This saves a bunch of code that needs to check if a
|
||||
* nested type within a map is undefined on first access.
|
||||
*/
|
||||
export class DefaultMap<K, V> extends Map<K, V> {
|
||||
/**
|
||||
* Returns a new `DefaultMap`.
|
||||
*
|
||||
* @param defaultFactory The factory invoked when an undefined value is accessed.
|
||||
*/
|
||||
constructor(private defaultFactory: () => V) {
|
||||
super()
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a key from the map. If the key does not exist, the default factory produces
* a value that is inserted into the map before being returned.
|
||||
*
|
||||
* @param key The key to retrieve.
|
||||
*/
|
||||
public getOrDefault(key: K): V {
|
||||
let value = super.get(key)
|
||||
if (value !== undefined) {
|
||||
return value
|
||||
}
|
||||
|
||||
value = this.defaultFactory()
|
||||
this.set(key, value)
|
||||
return value
|
||||
}
|
||||
}
|
||||
42
lsif/src/encoding.test.ts
Normal file
42
lsif/src/encoding.test.ts
Normal file
@ -0,0 +1,42 @@
|
||||
import { createFilter, testFilter, gzipJSON, gunzipJSON } from './encoding'
|
||||
|
||||
describe('testFilter', () => {
|
||||
it('should test set membership', async () => {
|
||||
const filter = await createFilter(['foo', 'bar', 'baz'])
|
||||
expect(await testFilter(filter, 'foo')).toBeTruthy()
|
||||
expect(await testFilter(filter, 'bar')).toBeTruthy()
|
||||
expect(await testFilter(filter, 'baz')).toBeTruthy()
|
||||
expect(await testFilter(filter, 'bonk')).toBeFalsy()
|
||||
expect(await testFilter(filter, 'quux')).toBeFalsy()
|
||||
})
|
||||
})
|
||||
|
||||
describe('gzipJSON', () => {
|
||||
it('should preserve maps', async () => {
|
||||
const m = new Map<string, number>([['a', 1], ['b', 2], ['c', 3]])
|
||||
|
||||
const value = {
|
||||
foo: [1, 2, 3],
|
||||
bar: ['abc', 'xyz'],
|
||||
baz: m,
|
||||
}
|
||||
|
||||
const encoded = await gzipJSON(value)
|
||||
const decoded = await gunzipJSON(encoded)
|
||||
expect(decoded).toEqual(value)
|
||||
})
|
||||
|
||||
it('should preserve sets', async () => {
|
||||
const s = new Set<number>([1, 2, 3, 4, 5])
|
||||
|
||||
const value = {
|
||||
foo: [1, 2, 3],
|
||||
bar: ['abc', 'xyz'],
|
||||
baz: s,
|
||||
}
|
||||
|
||||
const encoded = await gzipJSON(value)
|
||||
const decoded = await gunzipJSON(encoded)
|
||||
expect(decoded).toEqual(value)
|
||||
})
|
||||
})
|
||||
125
lsif/src/encoding.ts
Normal file
125
lsif/src/encoding.ts
Normal file
@ -0,0 +1,125 @@
|
||||
import { gzip, gunzip } from 'mz/zlib'
|
||||
import { BloomFilter } from 'bloomfilter'
|
||||
import { readEnvInt } from './util'
|
||||
|
||||
// These parameters give us a 1 in 1.38x10^9 false positive rate if we assume
|
||||
// that the number of unique URIs referrable by an external package is of the
|
||||
// order of 10k (....but I have no idea if that is a reasonable estimate....).
|
||||
//
|
||||
// See the following link for a bloom calculator: https://hur.st/bloomfilter
|
||||
|
||||
/**
|
||||
* The number of bits allocated for new bloom filters.
|
||||
*/
|
||||
const BLOOM_FILTER_BITS = readEnvInt('BLOOM_FILTER_BITS', 64 * 1024)
|
||||
|
||||
/**
|
||||
* The number of hash functions to use to determine if a value is a member of the filter.
|
||||
*/
|
||||
const BLOOM_FILTER_NUM_HASH_FUNCTIONS = readEnvInt('BLOOM_FILTER_NUM_HASH_FUNCTIONS', 16)
|
||||
|
||||
/**
|
||||
* A type that describes the encoded version of a bloom filter.
|
||||
*/
|
||||
export type EncodedBloomFilter = Buffer
|
||||
|
||||
/**
|
||||
* Create a bloom filter containing the given values and return an encoded version.
|
||||
*
|
||||
* @param values The values to add to the bloom filter.
|
||||
*/
|
||||
export function createFilter(values: string[]): Promise<EncodedBloomFilter> {
|
||||
const filter = new BloomFilter(BLOOM_FILTER_BITS, BLOOM_FILTER_NUM_HASH_FUNCTIONS)
|
||||
for (const value of values) {
|
||||
filter.add(value)
|
||||
}
|
||||
|
||||
// Need to shed the type of the array
|
||||
const buckets = Array.from(filter.buckets)
|
||||
|
||||
// Store the number of hash functions used to create this filter, as the configured value may change after
|
||||
// this value is serialized. We don't want to test with more hash functions than
|
||||
// it was created with, otherwise we'll get false negatives.
|
||||
return gzipJSON({ numHashFunctions: BLOOM_FILTER_NUM_HASH_FUNCTIONS, buckets })
|
||||
}
|
||||
|
||||
/**
|
||||
* Decode `filter` as created by `createFilter` and determine if `value` is a
|
||||
* possible element. This may return a false positive (returning true if the
|
||||
* element is not actually a member), but will not return false negatives.
|
||||
*
|
||||
* @param filter The encoded filter.
|
||||
* @param value The value to test for membership.
|
||||
*/
|
||||
export async function testFilter(filter: EncodedBloomFilter, value: string): Promise<boolean> {
|
||||
const { numHashFunctions, buckets } = await gunzipJSON(filter)
|
||||
return new BloomFilter(buckets, numHashFunctions).test(value)
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the gzipped JSON representation of `value`.
|
||||
*
|
||||
* @param value The value to encode.
|
||||
*/
|
||||
export function gzipJSON<T>(value: T): Promise<Buffer> {
|
||||
return gzip(Buffer.from(dumpJSON(value)))
|
||||
}
|
||||
|
||||
/**
|
||||
* Reverse the operation of `gzipJSON`.
|
||||
*
|
||||
* @param value The value to decode.
|
||||
*/
|
||||
export async function gunzipJSON<T>(value: Buffer): Promise<T> {
|
||||
return parseJSON((await gunzip(value)).toString())
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the JSON representation of `value`. This has special logic to
|
||||
* convert ES6 map and set structures into a JSON-representable value.
|
||||
* This method, along with `parseJSON` should be used over the raw methods
|
||||
* if the payload may contain maps.
|
||||
*
|
||||
* @param value The value to jsonify.
|
||||
*/
|
||||
function dumpJSON<T>(value: T): string {
|
||||
return JSON.stringify(value, (_, value) => {
|
||||
if (value instanceof Map) {
|
||||
return {
|
||||
type: 'map',
|
||||
value: [...value],
|
||||
}
|
||||
}
|
||||
|
||||
if (value instanceof Set) {
|
||||
return {
|
||||
type: 'set',
|
||||
value: [...value],
|
||||
}
|
||||
}
|
||||
|
||||
return value
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the JSON representation of `value`. This has special logic to
|
||||
* unmarshal map and set objects as encoded by `dumpJSON`.
|
||||
*
|
||||
* @param value The value to unmarshal.
|
||||
*/
|
||||
function parseJSON<T>(value: string): T {
|
||||
return JSON.parse(value, (_, value) => {
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
if (value.type === 'map') {
|
||||
return new Map(value.value)
|
||||
}
|
||||
|
||||
if (value.type === 'set') {
|
||||
return new Set(value.value)
|
||||
}
|
||||
}
|
||||
|
||||
return value
|
||||
})
|
||||
}
|
||||
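To make the custom replacer and reviver above concrete, here is the intermediate JSON they produce and a round trip through the exported functions (an illustrative sketch, not part of this change):

import { gzipJSON, gunzipJSON } from './encoding'

// A Map serializes to {"type":"map","value":[["a",1]]} and a Set to {"type":"set","value":[1,2]}
// before compression, and both are rebuilt on the way back out.
async function roundTrip(): Promise<void> {
    const encoded = await gzipJSON({ ranges: new Map([['a', 1]]) })
    const decoded = await gunzipJSON<{ ranges: Map<string, number> }>(encoded)
    console.log(decoded.ranges.get('a')) // 1
}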
16
lsif/src/importer.test.ts
Normal file
@ -0,0 +1,16 @@
|
||||
import { reachableMonikers } from './importer'
|
||||
import { MonikerId } from './models.database'
|
||||
|
||||
describe('reachableMonikers', () => {
|
||||
it('should traverse moniker relation graph', () => {
|
||||
const monikerSets = new Map<MonikerId, Set<MonikerId>>()
|
||||
monikerSets.set(1, new Set<MonikerId>([2]))
|
||||
monikerSets.set(2, new Set<MonikerId>([1, 4]))
|
||||
monikerSets.set(3, new Set<MonikerId>([4]))
|
||||
monikerSets.set(4, new Set<MonikerId>([2, 3]))
|
||||
monikerSets.set(5, new Set<MonikerId>([6]))
|
||||
monikerSets.set(6, new Set<MonikerId>([5]))
|
||||
|
||||
expect(reachableMonikers(monikerSets, 1)).toEqual(new Set<MonikerId>([1, 2, 3, 4]))
|
||||
})
|
||||
})
|
||||
510
lsif/src/importer.ts
Normal file
@ -0,0 +1,510 @@
|
||||
import { mustGet, assertId, hashKey, readEnvInt } from './util'
|
||||
import { Correlator, ResultSetData, ResultSetId } from './correlator'
|
||||
import { DefaultMap } from './default-map'
|
||||
import {
|
||||
DefinitionModel,
|
||||
MetaModel,
|
||||
MonikerData,
|
||||
PackageInformationData,
|
||||
RangeData,
|
||||
ReferenceModel,
|
||||
ResultChunkModel,
|
||||
DocumentIdRangeId,
|
||||
DefinitionResultId,
|
||||
MonikerId,
|
||||
DefinitionReferenceResultId,
|
||||
DocumentId,
|
||||
ReferenceResultId,
|
||||
PackageInformationId,
|
||||
HoverResultId,
|
||||
DocumentData,
|
||||
DocumentModel,
|
||||
} from './models.database'
|
||||
import { Edge, MonikerKind, Vertex, RangeId } from 'lsif-protocol'
|
||||
import { gzipJSON } from './encoding'
|
||||
import { EntityManager } from 'typeorm'
|
||||
import { isEqual, uniqWith } from 'lodash'
|
||||
import { Package, SymbolReferences } from './xrepo'
|
||||
import { TableInserter } from './inserter'
|
||||
|
||||
/**
|
||||
* The internal version of our SQLite databases. We need to keep this in case
|
||||
* we add something that can't be done transparently; if we change how we process
|
||||
* something in the future we'll need to consider a number of previous versions
|
||||
* while we update or re-process the already-uploaded data.
|
||||
*/
|
||||
const INTERNAL_LSIF_VERSION = '0.1.0'
|
||||
|
||||
/**
|
||||
* The target results per result chunk. This is used to determine the number of chunks
|
||||
* created during conversion, but does not guarantee that the distribution of hash keys
|
||||
* will be even. In practice, chunks are fairly evenly filled.
|
||||
*/
|
||||
const RESULTS_PER_RESULT_CHUNK = readEnvInt('RESULTS_PER_RESULT_CHUNK', 500)
|
||||
|
||||
/**
|
||||
* The maximum number of result chunks that will be created during conversion.
|
||||
*/
|
||||
const MAX_NUM_RESULT_CHUNKS = readEnvInt('MAX_NUM_RESULT_CHUNKS', 1000)
|
||||
|
||||
/**
|
||||
* Correlate each vertex and edge together, then populate the provided entity manager
|
||||
* with the document, definition, and reference information. Returns the package and
|
||||
* external reference data needed to populate the xrepo database.
|
||||
*
|
||||
* @param entityManager A transactional SQLite entity manager.
|
||||
* @param elements The stream of vertex and edge objects composing the LSIF dump.
|
||||
*/
|
||||
export async function importLsif(
|
||||
entityManager: EntityManager,
|
||||
elements: AsyncIterable<Vertex | Edge>
|
||||
): Promise<{ packages: Package[]; references: SymbolReferences[] }> {
|
||||
const correlator = new Correlator()
|
||||
|
||||
let line = 0
|
||||
for await (const element of elements) {
|
||||
try {
|
||||
correlator.insert(element)
|
||||
} catch (e) {
|
||||
throw Object.assign(
|
||||
new Error(`Failed to process line #${line + 1} (${JSON.stringify(element)}): ${e && e.message}`),
|
||||
{ status: 422 }
|
||||
)
|
||||
}
|
||||
|
||||
line++
|
||||
}
|
||||
|
||||
if (correlator.lsifVersion === undefined) {
|
||||
throw new Error('No metadata defined.')
|
||||
}
|
||||
|
||||
const numResults = correlator.definitionData.size + correlator.referenceData.size
|
||||
const numResultChunks = Math.min(MAX_NUM_RESULT_CHUNKS, Math.floor(numResults / RESULTS_PER_RESULT_CHUNK) || 1)
|
||||
|
||||
// Insert metadata
|
||||
const metaInserter = new TableInserter(entityManager, MetaModel, MetaModel.BatchSize)
|
||||
await populateMetadataTable(correlator, metaInserter, numResultChunks)
|
||||
await metaInserter.flush()
|
||||
|
||||
// Insert documents
|
||||
const documentInserter = new TableInserter(entityManager, DocumentModel, DocumentModel.BatchSize)
|
||||
await populateDocumentsTable(correlator, documentInserter)
|
||||
await documentInserter.flush()
|
||||
|
||||
// Insert result chunks
|
||||
const resultChunkInserter = new TableInserter(entityManager, ResultChunkModel, ResultChunkModel.BatchSize)
|
||||
await populateResultChunksTable(correlator, resultChunkInserter, numResultChunks)
|
||||
await resultChunkInserter.flush()
|
||||
|
||||
// Insert definitions and references
|
||||
const definitionInserter = new TableInserter(entityManager, DefinitionModel, DefinitionModel.BatchSize)
|
||||
const referenceInserter = new TableInserter(entityManager, ReferenceModel, ReferenceModel.BatchSize)
|
||||
await populateDefinitionsAndReferencesTables(correlator, definitionInserter, referenceInserter)
|
||||
await definitionInserter.flush()
|
||||
await referenceInserter.flush()
|
||||
|
||||
// Return data to populate xrepo database
|
||||
return { packages: getPackages(correlator), references: getReferences(correlator) }
|
||||
}
|
||||
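A sketch of how a caller might drive `importLsif` (illustrative only; the actual wiring is not shown in this diff). It assumes a typeorm `Connection` to the target SQLite file and an async iterable of already-parsed vertices and edges:

import { Connection } from 'typeorm'
import { Edge, Vertex } from 'lsif-protocol'
import { importLsif } from './importer'

async function convertDump(connection: Connection, elements: AsyncIterable<Vertex | Edge>): Promise<void> {
    // Run the whole import in one transaction so a malformed dump leaves no partial data behind.
    const { packages, references } = await connection.transaction(entityManager =>
        importLsif(entityManager, elements)
    )

    // These values would then be inserted into the xrepo database.
    console.log(packages.length, references.length)
}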
|
||||
/**
|
||||
* Correlate, encode, and insert all document entries for this dump.
|
||||
*/
|
||||
async function populateDocumentsTable(
|
||||
correlator: Correlator,
|
||||
documentInserter: TableInserter<DocumentModel, new () => DocumentModel>
|
||||
): Promise<void> {
|
||||
// Collapse result set data into the ranges that can reach them. The
|
||||
// remainder of this function assumes that we can completely ignore
|
||||
// the "next" edges coming from range data.
|
||||
for (const [rangeId, range] of correlator.rangeData) {
|
||||
canonicalizeItem(correlator, rangeId, range)
|
||||
}
|
||||
|
||||
// Gather and insert document data that includes the ranges contained in the document,
|
||||
// any associated hover data, and any associated moniker data/package information.
|
||||
// Each range also has identifiers that correlate to a definition or reference result
|
||||
// which can be found in a result chunk, created in the next step.
|
||||
|
||||
for (const [documentId, documentPath] of correlator.documentPaths) {
|
||||
// Create document record from the correlated information. This will also insert
|
||||
// external definitions and references into the maps initialized above, which are
|
||||
// inserted into the definitions and references table, respectively, below.
|
||||
const document = gatherDocument(correlator, documentId, documentPath)
|
||||
|
||||
// Encode and insert document record
|
||||
await documentInserter.insert({
|
||||
path: documentPath,
|
||||
data: await gzipJSON({
|
||||
ranges: document.ranges,
|
||||
hoverResults: document.hoverResults,
|
||||
monikers: document.monikers,
|
||||
packageInformation: document.packageInformation,
|
||||
}),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Correlate and insert all result chunk entries for this dump.
|
||||
*/
|
||||
async function populateResultChunksTable(
|
||||
correlator: Correlator,
|
||||
resultChunkInserter: TableInserter<ResultChunkModel, new () => ResultChunkModel>,
|
||||
numResultChunks: number
|
||||
): Promise<void> {
|
||||
// Create all the result chunks we'll be populating and inserting up-front. Data will
|
||||
// be inserted into result chunks based on hash values (modulo the number of result chunks),
|
||||
// and we don't want to create them lazily.
|
||||
|
||||
const resultChunks = new Array(numResultChunks).fill(null).map(() => ({
|
||||
paths: new Map<DocumentId, string>(),
|
||||
documentIdRangeIds: new Map<DefinitionReferenceResultId, DocumentIdRangeId[]>(),
|
||||
}))
|
||||
|
||||
const chunkResults = (data: Map<DefinitionReferenceResultId, Map<DocumentId, RangeId[]>>): void => {
|
||||
for (const [id, documentRanges] of data) {
|
||||
// Flatten map into list of ranges
|
||||
let documentIdRangeIds: DocumentIdRangeId[] = []
|
||||
for (const [documentId, rangeIds] of documentRanges) {
|
||||
documentIdRangeIds = documentIdRangeIds.concat(rangeIds.map(rangeId => ({ documentId, rangeId })))
|
||||
}
|
||||
|
||||
// Insert ranges into target result chunk
|
||||
const resultChunk = resultChunks[hashKey(id, resultChunks.length)]
|
||||
resultChunk.documentIdRangeIds.set(id, documentIdRangeIds)
|
||||
|
||||
for (const documentId of documentRanges.keys()) {
|
||||
// Add paths into the result chunk where they are used
|
||||
resultChunk.paths.set(documentId, mustGet(correlator.documentPaths, documentId, 'documentPath'))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add definitions and references to result chunks
|
||||
chunkResults(correlator.definitionData)
|
||||
chunkResults(correlator.referenceData)
|
||||
|
||||
for (const [id, resultChunk] of resultChunks.entries()) {
|
||||
// Empty chunk, no need to serialize as it will never be queried
|
||||
if (resultChunk.paths.size === 0 && resultChunk.documentIdRangeIds.size === 0) {
|
||||
continue
|
||||
}
|
||||
|
||||
const data = await gzipJSON({
|
||||
documentPaths: resultChunk.paths,
|
||||
documentIdRangeIds: resultChunk.documentIdRangeIds,
|
||||
})
|
||||
|
||||
// Encode and insert result chunk record
|
||||
await resultChunkInserter.insert({ id, data })
|
||||
}
|
||||
}
|
||||
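The inverse of this chunking happens at query time: because the number of result chunks is persisted in the meta table, the same hash recovers the chunk that holds a given result. A minimal sketch of that lookup (illustrative only; the query-side code is not part of this hunk):

import { hashKey } from './util'
import { DefinitionReferenceResultId } from './models.database'

// Map a definition or reference result identifier to the result chunk that stores it.
// numResultChunks must come from the meta table so the hash stays stable across runs.
function resultChunkIndex(id: DefinitionReferenceResultId, numResultChunks: number): number {
    return hashKey(id, numResultChunks)
}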
|
||||
/**
|
||||
* Correlate and insert all definition and reference entries for this dump.
|
||||
*/
|
||||
async function populateDefinitionsAndReferencesTables(
|
||||
correlator: Correlator,
|
||||
definitionInserter: TableInserter<DefinitionModel, new () => DefinitionModel>,
|
||||
referenceInserter: TableInserter<ReferenceModel, new () => ReferenceModel>
|
||||
): Promise<void> {
|
||||
// Determine the set of monikers that are attached to a definition or a
|
||||
// reference result. Correlating information in this way has two benefits:
|
||||
// (1) it reduces duplicates in the definitions and references tables
|
||||
// (2) it stops us from re-iterating over the range data of the entire
|
||||
// LSIF dump, which is by far the largest proportion of data.
|
||||
|
||||
const definitionMonikers = new DefaultMap<DefinitionResultId, Set<MonikerId>>(() => new Set())
|
||||
const referenceMonikers = new DefaultMap<ReferenceResultId, Set<MonikerId>>(() => new Set())
|
||||
|
||||
for (const range of correlator.rangeData.values()) {
|
||||
if (range.monikerIds.size === 0) {
|
||||
continue
|
||||
}
|
||||
|
||||
if (range.definitionResultId !== undefined) {
|
||||
const set = definitionMonikers.getOrDefault(range.definitionResultId)
|
||||
for (const monikerId of range.monikerIds) {
|
||||
set.add(monikerId)
|
||||
}
|
||||
}
|
||||
|
||||
if (range.referenceResultId !== undefined) {
|
||||
const set = referenceMonikers.getOrDefault(range.referenceResultId)
|
||||
for (const monikerId of range.monikerIds) {
|
||||
set.add(monikerId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const insertMonikerRanges = async (
|
||||
data: Map<DefinitionReferenceResultId, Map<DocumentId, RangeId[]>>,
|
||||
monikers: Map<MonikerId, Set<RangeId>>,
|
||||
inserter: TableInserter<DefinitionModel | ReferenceModel, new () => DefinitionModel | ReferenceModel>
|
||||
): Promise<void> => {
|
||||
for (const [id, documentRanges] of data) {
|
||||
// Get monikers. Nothing to insert if we don't have any.
|
||||
const monikerIds = monikers.get(id)
|
||||
if (monikerIds === undefined) {
|
||||
continue
|
||||
}
|
||||
|
||||
// Correlate each moniker with the document/range pairs stored in
|
||||
// the result set provided by the data argument of this function.
|
||||
|
||||
for (const monikerId of monikerIds) {
|
||||
const moniker = mustGet(correlator.monikerData, monikerId, 'moniker')
|
||||
|
||||
for (const [documentId, rangeIds] of documentRanges) {
|
||||
const documentPath = mustGet(correlator.documentPaths, documentId, 'documentPath')
|
||||
|
||||
for (const rangeId of rangeIds) {
|
||||
const range = mustGet(correlator.rangeData, rangeId, 'range')
|
||||
|
||||
await inserter.insert({
|
||||
scheme: moniker.scheme,
|
||||
identifier: moniker.identifier,
|
||||
documentPath,
|
||||
...range,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Insert definitions and references records
|
||||
await insertMonikerRanges(correlator.definitionData, definitionMonikers, definitionInserter)
|
||||
await insertMonikerRanges(correlator.referenceData, referenceMonikers, referenceInserter)
|
||||
}
|
||||
|
||||
/**
|
||||
* Insert metadata row. This gives us a place to store the version of the converter that
|
||||
* created a database in case we have backwards-incompatible changes in the future that
|
||||
* require historic version flagging. This also stores the number of result chunks
|
||||
* determined above so that we can have stable hashes at query time.
|
||||
*/
|
||||
async function populateMetadataTable(
|
||||
correlator: Correlator,
|
||||
metaInserter: TableInserter<MetaModel, new () => MetaModel>,
|
||||
numResultChunks: number
|
||||
): Promise<void> {
|
||||
await metaInserter.insert({
|
||||
id: 1,
|
||||
lsifVersion: correlator.lsifVersion,
|
||||
sourcegraphVersion: INTERNAL_LSIF_VERSION,
|
||||
numResultChunks,
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Gather all package information that is referenced by an exported
|
||||
* moniker. These will be the packages that are provided by the repository
|
||||
* represented by this LSIF dump.
|
||||
*/
|
||||
function getPackages(correlator: Correlator): Package[] {
|
||||
const packages: Package[] = []
|
||||
for (const id of correlator.exportedMonikers) {
|
||||
const source = mustGet(correlator.monikerData, id, 'moniker')
|
||||
const packageInformationId = assertId(source.packageInformationId)
|
||||
const packageInfo = mustGet(correlator.packageInformationData, packageInformationId, 'packageInformation')
|
||||
packages.push({
|
||||
scheme: source.scheme,
|
||||
name: packageInfo.name,
|
||||
version: packageInfo.version,
|
||||
})
|
||||
}
|
||||
|
||||
return uniqWith(packages, isEqual)
|
||||
}
|
||||
|
||||
/**
|
||||
* Gather all imported moniker identifiers along with their package
|
||||
* information. These will be the packages that are a dependency of the
|
||||
* repository represented by this LSIF dump.
|
||||
*/
|
||||
function getReferences(correlator: Correlator): SymbolReferences[] {
|
||||
const packageIdentifiers: Map<string, string[]> = new Map()
|
||||
for (const id of correlator.importedMonikers) {
|
||||
const source = mustGet(correlator.monikerData, id, 'moniker')
|
||||
const packageInformationId = assertId(source.packageInformationId)
|
||||
const packageInfo = mustGet(correlator.packageInformationData, packageInformationId, 'packageInformation')
|
||||
const pkg = JSON.stringify({
|
||||
scheme: source.scheme,
|
||||
name: packageInfo.name,
|
||||
version: packageInfo.version,
|
||||
})
|
||||
|
||||
const list = packageIdentifiers.get(pkg)
|
||||
if (list) {
|
||||
list.push(source.identifier)
|
||||
} else {
|
||||
packageIdentifiers.set(pkg, [source.identifier])
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(packageIdentifiers).map(([key, identifiers]) => ({
|
||||
package: JSON.parse(key) as Package,
|
||||
identifiers,
|
||||
}))
|
||||
}
|
||||
|
||||
/**
|
||||
* Flatten the definition result, reference result, hover results, and monikers of range
|
||||
* and result set items by following next links in the graph. This needs to be run over
|
||||
* each range before committing them to a document.
|
||||
*
|
||||
* @param correlator The correlator with all vertices and edges inserted.
|
||||
* @param id The item identifier.
|
||||
* @param item The range or result set item.
|
||||
*/
|
||||
function canonicalizeItem(correlator: Correlator, id: RangeId | ResultSetId, item: RangeData | ResultSetData): void {
|
||||
const monikers = new Set<MonikerId>()
|
||||
if (item.monikerIds.size > 0) {
|
||||
// If we have any monikers attached to this item, then we only need to look at the
|
||||
// monikers reachable from any attached moniker. All other attached monikers are
|
||||
// necessarily reachable, so we can choose any single value from the moniker set
|
||||
// as the source of the graph traversal.
|
||||
|
||||
const candidateMoniker = item.monikerIds.keys().next().value
|
||||
|
||||
for (const monikerId of reachableMonikers(correlator.monikerSets, candidateMoniker)) {
|
||||
if (mustGet(correlator.monikerData, monikerId, 'moniker').kind !== MonikerKind.local) {
|
||||
monikers.add(monikerId)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const nextId = correlator.nextData.get(id)
|
||||
if (nextId !== undefined) {
|
||||
// If we have a next edge to a result set, get it and canonicalize it first. This
|
||||
// will recursively look at any result set that it can reach that hasn't yet been
|
||||
// canonicalized.
|
||||
|
||||
const nextItem = mustGet(correlator.resultSetData, nextId, 'resultSet')
|
||||
canonicalizeItem(correlator, nextId, nextItem)
|
||||
|
||||
// Add each moniker of the next set to this item
|
||||
for (const monikerId of nextItem.monikerIds) {
|
||||
monikers.add(monikerId)
|
||||
}
|
||||
|
||||
// If we do not have a definition, reference, or hover result, take the result
|
||||
// value from the next item.
|
||||
|
||||
if (item.definitionResultId === undefined) {
|
||||
item.definitionResultId = nextItem.definitionResultId
|
||||
}
|
||||
|
||||
if (item.referenceResultId === undefined) {
|
||||
item.referenceResultId = nextItem.referenceResultId
|
||||
}
|
||||
|
||||
if (item.hoverResultId === undefined) {
|
||||
item.hoverResultId = nextItem.hoverResultId
|
||||
}
|
||||
}
|
||||
|
||||
// Update our moniker sets (our normalized sets and any monikers of our next item)
|
||||
item.monikerIds = monikers
|
||||
|
||||
// Remove the next edge so we don't traverse it a second time
|
||||
correlator.nextData.delete(id)
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a self-contained document object from the data in the given correlator. This
|
||||
* includes hover and moniker results, as well as identifiers to definition and reference
|
||||
* results (but not the actual ranges). See result chunk table for details.
|
||||
*
|
||||
* @param correlator The correlator with all vertices and edges inserted.
|
||||
* @param currentDocumentId The identifier of the document.
|
||||
* @param path The path of the document.
|
||||
*/
|
||||
function gatherDocument(correlator: Correlator, currentDocumentId: DocumentId, path: string): DocumentData {
|
||||
const document = {
|
||||
path,
|
||||
ranges: new Map<RangeId, RangeData>(),
|
||||
hoverResults: new Map<HoverResultId, string>(),
|
||||
monikers: new Map<MonikerId, MonikerData>(),
|
||||
packageInformation: new Map<PackageInformationId, PackageInformationData>(),
|
||||
}
|
||||
|
||||
const addHover = (id: HoverResultId | undefined): void => {
|
||||
if (id === undefined || document.hoverResults.has(id)) {
|
||||
return
|
||||
}
|
||||
|
||||
// Add hover result to the document, if defined and not a duplicate
|
||||
const data = mustGet(correlator.hoverData, id, 'hoverResult')
|
||||
document.hoverResults.set(id, data)
|
||||
}
|
||||
|
||||
const addPackageInformation = (id: PackageInformationId | undefined): void => {
|
||||
if (id === undefined || document.packageInformation.has(id)) {
|
||||
return
|
||||
}
|
||||
|
||||
// Add package information to the document, if defined and not a duplicate
|
||||
const data = mustGet(correlator.packageInformationData, id, 'packageInformation')
|
||||
document.packageInformation.set(id, data)
|
||||
}
|
||||
|
||||
const addMoniker = (id: MonikerId | undefined): void => {
|
||||
if (id === undefined || document.monikers.has(id)) {
|
||||
return
|
||||
}
|
||||
|
||||
// Add moniker to the document, if defined and not a duplicate
|
||||
const moniker = mustGet(correlator.monikerData, id, 'moniker')
|
||||
document.monikers.set(id, moniker)
|
||||
|
||||
// Add related package information to document
|
||||
addPackageInformation(moniker.packageInformationId)
|
||||
}
|
||||
|
||||
for (const id of mustGet(correlator.containsData, currentDocumentId, 'contains')) {
|
||||
const range = mustGet(correlator.rangeData, id, 'range')
|
||||
addHover(range.hoverResultId)
|
||||
for (const id of range.monikerIds) {
|
||||
addMoniker(id)
|
||||
}
|
||||
|
||||
document.ranges.set(id, range)
|
||||
}
|
||||
|
||||
return document
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the set of moniker identifiers which are reachable from the given value.
|
||||
* This relies on `monikerSets` being properly set up: each moniker edge `a -> b`
|
||||
* from the dump should ensure that `b` is a member of `monikerSets[a]`, and that
|
||||
* `a` is a member of `monikerSets[b]`.
|
||||
*
|
||||
* @param monikerSets An undirected graph of moniker ids.
|
||||
* @param id The initial moniker id.
|
||||
*/
|
||||
export function reachableMonikers(monikerSets: Map<MonikerId, Set<MonikerId>>, id: MonikerId): Set<MonikerId> {
|
||||
const monikerIds = new Set<MonikerId>()
|
||||
let frontier = [id]
|
||||
|
||||
while (frontier.length > 0) {
|
||||
const val = assertId(frontier.pop())
|
||||
if (monikerIds.has(val)) {
|
||||
continue
|
||||
}
|
||||
|
||||
monikerIds.add(val)
|
||||
|
||||
const nextValues = monikerSets.get(val)
|
||||
if (nextValues) {
|
||||
frontier = frontier.concat(Array.from(nextValues))
|
||||
}
|
||||
}
|
||||
|
||||
// TODO - (efritz) should we sort these ids here instead of at query time?
|
||||
return monikerIds
|
||||
}
|
||||
82
lsif/src/inserter.ts
Normal file
@ -0,0 +1,82 @@
|
||||
import { EntityManager } from 'typeorm'
|
||||
import { QueryDeepPartialEntity } from 'typeorm/query-builder/QueryPartialEntity'
|
||||
|
||||
/**
|
||||
* A batch inserter for a SQLite table. Inserting hundreds or thousands of rows in
|
||||
* a loop is too inefficient, but due to the limit of SQLITE_MAX_VARIABLE_NUMBER,
|
||||
* the entire set of values cannot be inserted in one bulk operation either.
|
||||
*
|
||||
* One inserter instance is created for each table that will receive a bulk
|
||||
* payload. The inserter will periodically perform the insert operation
|
||||
* when the number of buffered values reaches the maximum batch size.
|
||||
*
|
||||
* See https://www.sqlite.org/limits.html#max_variable_number.
|
||||
*/
|
||||
export class TableInserter<T, M extends new () => T> {
|
||||
/**
|
||||
* The set of entity values that will be inserted in the next invocation of `executeBatch`.
|
||||
*/
|
||||
private batch: QueryDeepPartialEntity<T>[] = []
|
||||
|
||||
/**
|
||||
* Creates a new `TableInserter` with the given entity manager, the constructor
|
||||
* of the model object for the table, and the maximum batch size. This number
|
||||
* should be calculated by floor(MAX_VAR_NUMBER / fields_in_record).
|
||||
*
|
||||
* @param entityManager A transactional SQLite entity manager.
|
||||
* @param model The model object constructor.
|
||||
* @param maxBatchSize The maximum number of records that can be inserted at once.
|
||||
* @param ignoreConflicts Whether or not to ignore conflicting data on unique constraint violations.
|
||||
*/
|
||||
constructor(
|
||||
private entityManager: EntityManager,
|
||||
private model: M,
|
||||
private maxBatchSize: number,
|
||||
private ignoreConflicts: boolean = false
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Submit a model for insertion. This may happen immediately, on a
|
||||
* subsequent call to insert, or when the `flush` method is called.
|
||||
*
|
||||
* @param model The instance to save.
|
||||
*/
|
||||
public async insert(model: QueryDeepPartialEntity<T>): Promise<void> {
|
||||
this.batch.push(model)
|
||||
|
||||
if (this.batch.length >= this.maxBatchSize) {
|
||||
await this.executeBatch()
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure any outstanding records are inserted into the database.
|
||||
*/
|
||||
public flush(): Promise<void> {
|
||||
return this.executeBatch()
|
||||
}
|
||||
|
||||
/**
|
||||
* If the current batch is non-empty, then perform an insert operation
|
||||
* and reset the batch array.
|
||||
*/
|
||||
private async executeBatch(): Promise<void> {
|
||||
if (this.batch.length === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
let query = this.entityManager
|
||||
.createQueryBuilder()
|
||||
.insert()
|
||||
.into(this.model)
|
||||
.values(this.batch)
|
||||
|
||||
if (this.ignoreConflicts) {
|
||||
query = query.onConflict('do nothing')
|
||||
}
|
||||
|
||||
await query.execute()
|
||||
|
||||
this.batch = []
|
||||
}
|
||||
}
|
||||
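A usage sketch of the inserter (illustrative only). The `getBatchSize` helper referenced by the model classes in models.database.ts lives in ./util and is not shown in this diff; a plausible implementation, assuming SQLite's historical default of 999 bound parameters per statement, is included here as an assumption:

import { EntityManager } from 'typeorm'
import { TableInserter } from './inserter'
import { DefinitionModel } from './models.database'

// Hypothetical sketch of the helper that computes how many rows fit in one INSERT statement.
function getBatchSize(numFields: number): number {
    return Math.floor(999 / numFields)
}

// Buffer definition rows and let the inserter batch the actual INSERT statements.
async function insertSampleDefinition(entityManager: EntityManager): Promise<void> {
    const inserter = new TableInserter(entityManager, DefinitionModel, getBatchSize(7))
    await inserter.insert({
        scheme: 'npm',
        identifier: 'util:add',
        documentPath: 'src/index.ts',
        startLine: 0,
        startCharacter: 16,
        endLine: 0,
        endCharacter: 19,
    })
    await inserter.flush()
}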
374
lsif/src/models.database.ts
Normal file
@ -0,0 +1,374 @@
|
||||
import { Column, Entity, Index, PrimaryColumn } from 'typeorm'
|
||||
import { Id, MonikerKind } from 'lsif-protocol'
|
||||
import { getBatchSize } from './util'
|
||||
|
||||
export type DocumentId = Id
|
||||
export type DocumentPath = string
|
||||
export type RangeId = Id
|
||||
export type DefinitionResultId = Id
|
||||
export type ReferenceResultId = Id
|
||||
export type DefinitionReferenceResultId = DefinitionResultId | ReferenceResultId
|
||||
export type HoverResultId = Id
|
||||
export type MonikerId = Id
|
||||
export type PackageInformationId = Id
|
||||
|
||||
/**
|
||||
* A type that describes a gzipped and JSON-encoded value of type `T`.
|
||||
*/
|
||||
export type JSONEncoded<T> = Buffer
|
||||
|
||||
/**
|
||||
* A type of hashed value created by hashing a value of type `T` and performing
|
||||
* the modulus with a value of type `U`. This is to link the index of a result
|
||||
* chunk to the hashed value of the identifiers stored within it.
|
||||
*/
|
||||
export type HashMod<T, U> = number
|
||||
|
||||
/**
|
||||
* An entity within the database describing LSIF data for a single repository
|
||||
* and commit pair. There should be only one metadata entity per database.
|
||||
*/
|
||||
@Entity({ name: 'meta' })
|
||||
export class MetaModel {
|
||||
/**
|
||||
* The number of model instances that can be inserted at once.
|
||||
*/
|
||||
public static BatchSize = getBatchSize(4)
|
||||
|
||||
/**
|
||||
* A unique ID required by typeorm entities: always zero here.
|
||||
*/
|
||||
@PrimaryColumn('int')
|
||||
public id!: number
|
||||
|
||||
/**
|
||||
* The version string of the input LSIF that created this database.
|
||||
*/
|
||||
@Column('text')
|
||||
public lsifVersion!: string
|
||||
|
||||
/**
|
||||
* The internal version of the LSIF server that created this database.
|
||||
*/
|
||||
@Column('text')
|
||||
public sourcegraphVersion!: string
|
||||
|
||||
/**
|
||||
* The number of result chunks allocated when converting the dump stored
|
||||
* in this database. This is used as an upper bound for the hash into the
|
||||
* `resultChunks` table and must be recorded to keep the hash generation
|
||||
* stable.
|
||||
*/
|
||||
@Column('int')
|
||||
public numResultChunks!: number
|
||||
}
|
||||
|
||||
/**
|
||||
* An entity within the database describing LSIF data for a single repository and
|
||||
* commit pair. This contains a JSON-encoded `DocumentData` object that describes
|
||||
* relations within a single file of the dump.
|
||||
*/
|
||||
@Entity({ name: 'documents' })
|
||||
export class DocumentModel {
|
||||
/**
|
||||
* The number of model instances that can be inserted at once.
|
||||
*/
|
||||
public static BatchSize = getBatchSize(2)
|
||||
|
||||
/**
|
||||
* The root-relative path of the document.
|
||||
*/
|
||||
@PrimaryColumn('text')
|
||||
public path!: DocumentPath
|
||||
|
||||
/**
|
||||
* The JSON-encoded document data.
|
||||
*/
|
||||
@Column('blob')
|
||||
public data!: JSONEncoded<DocumentData>
|
||||
}
|
||||
|
||||
/**
|
||||
* An entity within the database describing LSIF data for a single repository and
|
||||
* commit pair. This contains a JSON-encoded `ResultChunk` object that describes
|
||||
* a subset of the definition and reference results of the dump.
|
||||
*/
|
||||
@Entity({ name: 'resultChunks' })
|
||||
export class ResultChunkModel {
|
||||
/**
|
||||
* The number of model instances that can be inserted at once.
|
||||
*/
|
||||
public static BatchSize = getBatchSize(2)
|
||||
|
||||
/**
|
||||
* The identifier of the chunk. This is also the index of the chunk during its
|
||||
* construction, and the identifiers contained in this chunk hash to this index
|
||||
* (modulo the total number of chunks for the dump).
|
||||
*/
|
||||
@PrimaryColumn('int')
|
||||
public id!: HashMod<DefinitionReferenceResultId, MetaModel['numResultChunks']>
|
||||
|
||||
/**
|
||||
* The JSON-encoded chunk data.
|
||||
*/
|
||||
@Column('blob')
|
||||
public data!: JSONEncoded<ResultChunkData>
|
||||
}
|
||||
|
||||
/**
|
||||
* The base class for `DefinitionModel` and `ReferenceModel` as they have identical
|
||||
* column descriptions.
|
||||
*/
|
||||
class Symbols {
|
||||
/**
|
||||
* The number of model instances that can be inserted at once.
|
||||
*/
|
||||
public static BatchSize = getBatchSize(7)
|
||||
|
||||
/**
|
||||
* A unique ID required by typeorm entities.
|
||||
*/
|
||||
@PrimaryColumn('int')
|
||||
public id!: number
|
||||
|
||||
/**
|
||||
* The name of the package type (e.g. npm, pip).
|
||||
*/
|
||||
@Column('text')
|
||||
public scheme!: string
|
||||
|
||||
/**
|
||||
* The unique identifier of the moniker.
|
||||
*/
|
||||
@Column('text')
|
||||
public identifier!: string
|
||||
|
||||
/**
|
||||
* The path of the document to which this reference belongs.
|
||||
*/
|
||||
@Column('text')
|
||||
public documentPath!: DocumentPath
|
||||
|
||||
/**
|
||||
* The zero-indexed line describing the start of this range.
|
||||
*/
|
||||
@Column('int')
|
||||
public startLine!: number
|
||||
|
||||
/**
|
||||
* The zero-indexed line describing the end of this range.
|
||||
*/
|
||||
@Column('int')
|
||||
public endLine!: number
|
||||
|
||||
/**
|
||||
* The zero-indexed character describing the start of this range.
|
||||
*/
|
||||
@Column('int')
|
||||
public startCharacter!: number
|
||||
|
||||
/**
|
||||
* The zero-indexed character describing the end of this range.
|
||||
*/
|
||||
@Column('int')
|
||||
public endCharacter!: number
|
||||
}
|
||||
|
||||
/**
|
||||
* An entity within the database describing LSIF data for a single repository and commit
|
||||
* pair. This maps external monikers to their range and the document that contains the
|
||||
* definition of the moniker.
|
||||
*/
|
||||
@Entity({ name: 'definitions' })
|
||||
@Index(['scheme', 'identifier'])
|
||||
export class DefinitionModel extends Symbols {}
|
||||
|
||||
/**
|
||||
* An entity within the database describing LSIF data for a single repository and commit
|
||||
* pair. This maps imported monikers to their range and the document that contains a
|
||||
* reference to the moniker.
|
||||
*/
|
||||
@Entity({ name: 'references' })
|
||||
@Index(['scheme', 'identifier'])
|
||||
export class ReferenceModel extends Symbols {}
|
||||
|
||||
/**
|
||||
* Data for a single document within an LSIF dump. The data here can answer definitions,
|
||||
* references, and hover queries if the results are all contained within the same document.
|
||||
*/
|
||||
export interface DocumentData {
|
||||
/**
|
||||
* A mapping from range identifiers to range data.
|
||||
*/
|
||||
ranges: Map<RangeId, RangeData>
|
||||
|
||||
/**
|
||||
* A map of hover result identifiers to hover results normalized as a single
|
||||
* string.
|
||||
*/
|
||||
hoverResults: Map<HoverResultId, string>
|
||||
|
||||
/**
|
||||
* A map of moniker identifiers to moniker data.
|
||||
*/
|
||||
monikers: Map<MonikerId, MonikerData>
|
||||
|
||||
/**
|
||||
* A map of package information identifiers to package information data.
|
||||
*/
|
||||
packageInformation: Map<PackageInformationId, PackageInformationData>
|
||||
}
|
||||
|
||||
/**
|
||||
* A range identifier that also specifies the identifier of the document to
|
||||
* which it belongs. This is sometimes necessary as we hold definition and
|
||||
* reference results between packages, but the identifier of the range must be
|
||||
* looked up in a map of another encoded document.
|
||||
*/
|
||||
export interface DocumentIdRangeId {
|
||||
/**
|
||||
* The identifier of the document. The path of the document can be queried
|
||||
* by this identifier in the containing document.
|
||||
*/
|
||||
documentId: DocumentId
|
||||
|
||||
/**
|
||||
* The identifier of the range in the referenced document.
|
||||
*/
|
||||
rangeId: RangeId
|
||||
}
|
||||
|
||||
/**
|
||||
* A range identifier that also specifies the path of the document to which it
|
||||
* belongs. This is generally created by determining the path from an instance of
|
||||
* `DocumentIdRangeId`.
|
||||
*/
|
||||
export interface DocumentPathRangeId {
|
||||
/**
|
||||
* The path of the document.
|
||||
*/
|
||||
documentPath: DocumentPath
|
||||
|
||||
/**
|
||||
* The identifier of the range in the referenced document.
|
||||
*/
|
||||
rangeId: RangeId
|
||||
}
|
||||
|
||||
/**
|
||||
* A result chunk is a subset of the definition and reference result data for the
|
||||
* LSIF dump. Results are inserted into chunks based on the hash code of their
|
||||
* identifier (thus every chunk has a roughly proportional amount of data).
|
||||
*/
|
||||
export interface ResultChunkData {
|
||||
/**
|
||||
* A map from document identifiers to document paths. The document identifiers
|
||||
* in the `documentIdRangeIds` field reference a concrete path stored here.
|
||||
*/
|
||||
documentPaths: Map<DocumentId, DocumentPath>
|
||||
|
||||
/**
|
||||
* A map from definition or reference result identifiers to the ranges that
|
||||
* compose the result set. Each range is paired with the identifier of the
|
||||
* document in which it can be found.
|
||||
*/
|
||||
documentIdRangeIds: Map<DefinitionReferenceResultId, DocumentIdRangeId[]>
|
||||
}
|
||||
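A small sketch (illustrative only) of how the two maps above combine: the document identifiers attached to each range are resolved to concrete paths through the chunk's `documentPaths` map, producing the `DocumentPathRangeId` shape defined earlier.

import { DocumentIdRangeId, DocumentPathRangeId, ResultChunkData } from './models.database'

// Resolve the document identifiers of a definition/reference result into root-relative paths.
function resolveDocumentPaths(chunk: ResultChunkData, ids: DocumentIdRangeId[]): DocumentPathRangeId[] {
    return ids.map(({ documentId, rangeId }) => ({
        // A real implementation would guard against a missing entry instead of asserting.
        documentPath: chunk.documentPaths.get(documentId)!,
        rangeId,
    }))
}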
|
||||
/**
|
||||
* An internal representation of a range vertex from an LSIF dump. It contains the same
|
||||
* relevant edge data, which can be subsequently queried in the containing document. The
|
||||
* data that was reachable via a result set has been collapsed into this object during
|
||||
* import.
|
||||
*/
|
||||
export interface RangeData {
|
||||
/**
|
||||
* The line on which the range starts (0-indexed, inclusive).
|
||||
*/
|
||||
startLine: number
|
||||
|
||||
/**
|
||||
* The character on which the range starts (0-indexed, inclusive).
|
||||
*/
|
||||
startCharacter: number
|
||||
|
||||
/**
|
||||
* The line on which the range ends (0-indexed, inclusive).
|
||||
*/
|
||||
endLine: number
|
||||
|
||||
/**
|
||||
* The character on which the range ends (0-indexed, inclusive).
|
||||
*/
|
||||
endCharacter: number
|
||||
|
||||
/**
|
||||
* The identifier of the definition result attached to this range, if one exists.
|
||||
* The definition result object can be queried by its identifier within the containing
|
||||
* document.
|
||||
*/
|
||||
definitionResultId?: DefinitionResultId
|
||||
|
||||
/**
|
||||
* The identifier of the reference result attached to this range, if one exists.
|
||||
* The reference result object can be queried by its identifier within the containing
|
||||
* document.
|
||||
*/
|
||||
referenceResultId?: ReferenceResultId
|
||||
|
||||
/**
|
||||
* The identifier of the hover result attached to this range, if one exists. The
|
||||
* hover result object can be queried by its identifier within the containing
|
||||
* document.
|
||||
*/
|
||||
hoverResultId?: HoverResultId
|
||||
|
||||
/**
|
||||
* The set of moniker identifiers directly attached to this range. The moniker
|
||||
* object can be queried by its identifier within the
|
||||
* containing document.
|
||||
*/
|
||||
monikerIds: Set<MonikerId>
|
||||
}
|
||||
|
||||
/**
|
||||
* Data about a moniker attached to a range.
|
||||
*/
|
||||
export interface MonikerData {
|
||||
/**
|
||||
* The kind of moniker (e.g. local, import, export).
|
||||
*/
|
||||
kind: MonikerKind
|
||||
|
||||
/**
|
||||
* The name of the package type (e.g. npm, pip).
|
||||
*/
|
||||
scheme: string
|
||||
|
||||
/**
|
||||
* The unique identifier of the moniker.
|
||||
*/
|
||||
identifier: string
|
||||
|
||||
/**
|
||||
* The identifier of the package information attached to this moniker, if one exists.
|
||||
* The package information object can be queried by its identifier within the
|
||||
* containing document.
|
||||
*/
|
||||
packageInformationId?: PackageInformationId
|
||||
}
|
||||
|
||||
/**
|
||||
* Additional data about a non-local moniker.
|
||||
*/
|
||||
export interface PackageInformationData {
|
||||
/**
|
||||
* The name of the package the moniker describes.
|
||||
*/
|
||||
name: string
|
||||
|
||||
/**
|
||||
* The version of the package the moniker describes.
|
||||
*/
|
||||
version: string | null
|
||||
}
|
||||
81
lsif/src/models.xrepo.ts
Normal file
@ -0,0 +1,81 @@
|
||||
import { PrimaryGeneratedColumn, Column, Entity, Index } from 'typeorm'
|
||||
import { getBatchSize } from './util'
|
||||
import { EncodedBloomFilter } from './encoding'
|
||||
|
||||
/**
|
||||
* The base class for `PackageModel` and `ReferenceModel` as they have nearly
|
||||
* identical column descriptions.
|
||||
*/
|
||||
class Package {
|
||||
/**
|
||||
* A unique ID required by typeorm entities.
|
||||
*/
|
||||
@PrimaryGeneratedColumn('increment', { type: 'int' })
|
||||
public id!: number
|
||||
|
||||
/**
|
||||
* The name of the package type (e.g. npm, pip).
|
||||
*/
|
||||
@Column('text')
|
||||
public scheme!: string
|
||||
|
||||
/**
|
||||
* The name of the package this repository and commit provides.
|
||||
*/
|
||||
@Column('text')
|
||||
public name!: string
|
||||
|
||||
/**
|
||||
* The version of the package this repository and commit provides.
|
||||
*/
|
||||
@Column('text', { nullable: true })
|
||||
public version!: string | null
|
||||
|
||||
/**
|
||||
* The name of the source repository.
|
||||
*/
|
||||
@Column('text')
|
||||
public repository!: string
|
||||
|
||||
/**
|
||||
* The source commit.
|
||||
*/
|
||||
@Column('text')
|
||||
public commit!: string
|
||||
}
|
||||
|
||||
/**
|
||||
* An entity within the xrepo database. This maps a given repository and commit
|
||||
* pair to the package that it provides to other projects.
|
||||
*/
|
||||
@Entity({ name: 'packages' })
|
||||
@Index(['scheme', 'name', 'version'], { unique: true })
|
||||
@Index(['repository', 'commit'])
|
||||
export class PackageModel extends Package {
|
||||
/**
|
||||
* The number of model instances that can be inserted at once.
|
||||
*/
|
||||
public static BatchSize = getBatchSize(5)
|
||||
}
|
||||
|
||||
/**
|
||||
* An entity within the xrepo database. This lists the dependencies of a given
|
||||
* repository and commit pair to support find global reference operations.
|
||||
*/
|
||||
@Entity({ name: 'references' })
|
||||
@Index(['scheme', 'name', 'version'])
|
||||
@Index(['repository', 'commit'])
|
||||
export class ReferenceModel extends Package {
|
||||
/**
|
||||
* The number of model instances that can be inserted at once.
|
||||
*/
|
||||
public static BatchSize = getBatchSize(6)
|
||||
|
||||
/**
|
||||
* A serialized bloom filter that encodes the set of symbols that this repository
|
||||
* and commit imports from the given package. Testing this filter will prevent the
|
||||
* backend from opening databases that will yield no results for a particular symbol.
|
||||
*/
|
||||
@Column('blob')
|
||||
public filter!: EncodedBloomFilter
|
||||
}
|
||||
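A sketch of the gating this filter enables (illustrative only; the actual lookup code is not part of this hunk): before opening a dump's SQLite database during a global-references query, the stored filter is tested against the symbol identifier and definite misses are skipped.

import { testFilter } from './encoding'
import { ReferenceModel } from './models.xrepo'

// Keep only the candidate dumps whose bloom filter may contain the identifier.
// False positives are possible; false negatives are not.
async function filterCandidateDumps(candidates: ReferenceModel[], identifier: string): Promise<ReferenceModel[]> {
    const matches: ReferenceModel[] = []
    for (const candidate of candidates) {
        if (await testFilter(candidate.filter, identifier)) {
            matches.push(candidate)
        }
    }
    return matches
}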
52
lsif/src/query-cpp.test.ts
Normal file
@ -0,0 +1,52 @@
|
||||
import * as fs from 'mz/fs'
|
||||
import * as zlib from 'mz/zlib'
|
||||
import rmfr from 'rmfr'
|
||||
import { ConnectionCache, DocumentCache, ResultChunkCache } from './cache'
|
||||
import { createBackend } from './backend'
|
||||
import { createCommit, createLocation } from './test-utils'
|
||||
|
||||
describe('Database', () => {
|
||||
let storageRoot!: string
|
||||
const connectionCache = new ConnectionCache(10)
|
||||
const documentCache = new DocumentCache(10)
|
||||
const resultChunkCache = new ResultChunkCache(10)
|
||||
|
||||
beforeAll(async () => {
|
||||
storageRoot = await fs.mkdtemp('cpp-', { encoding: 'utf8' })
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const input = fs.createReadStream('./test-data/cpp/data/data.lsif.gz').pipe(zlib.createGunzip())
|
||||
await backend.insertDump(input, 'five', createCommit('five'))
|
||||
})
|
||||
|
||||
afterAll(async () => await rmfr(storageRoot))
|
||||
|
||||
it('should find all defs of `four` from main.cpp', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('five', createCommit('five'))
|
||||
const definitions = await db.definitions('main.cpp', { line: 12, character: 3 })
|
||||
// TODO - (FIXME) currently the dxr indexer returns zero-width ranges
|
||||
expect(definitions).toEqual([createLocation('main.cpp', 6, 4, 6, 4)])
|
||||
})
|
||||
|
||||
it('should find all defs of `five` from main.cpp', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('five', createCommit('five'))
|
||||
const definitions = await db.definitions('main.cpp', { line: 11, character: 3 })
|
||||
// TODO - (FIXME) currently the dxr indexer returns zero-width ranges
|
||||
expect(definitions).toEqual([createLocation('five.cpp', 2, 4, 2, 4)])
|
||||
})
|
||||
|
||||
it('should find all refs of `five` from main.cpp', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('five', createCommit('five'))
|
||||
const references = await db.references('main.cpp', { line: 11, character: 3 })
|
||||
|
||||
// TODO - should the definition be in this result set?
|
||||
expect(references).toContainEqual(createLocation('five.h', 1, 4, 1, 8))
|
||||
// TODO - (FIXME) currently the dxr indexer returns zero-width ranges
|
||||
expect(references).toContainEqual(createLocation('five.cpp', 2, 4, 2, 4))
|
||||
expect(references).toContainEqual(createLocation('main.cpp', 11, 2, 11, 6))
|
||||
expect(references).toContainEqual(createLocation('main.cpp', 13, 2, 13, 6))
|
||||
expect(references).toHaveLength(4)
|
||||
})
|
||||
})
|
||||
174
lsif/src/query-typescript.test.ts
Normal file
@ -0,0 +1,174 @@
|
||||
import * as fs from 'mz/fs'
|
||||
import * as zlib from 'mz/zlib'
|
||||
import rmfr from 'rmfr'
|
||||
import { ConnectionCache, DocumentCache, ResultChunkCache } from './cache'
|
||||
import { createBackend } from './backend'
|
||||
import { createCommit, createLocation, createRemoteLocation } from './test-utils'
|
||||
import { Readable } from 'stream'
|
||||
|
||||
describe('Database', () => {
|
||||
let storageRoot!: string
|
||||
const connectionCache = new ConnectionCache(10)
|
||||
const documentCache = new DocumentCache(10)
|
||||
const resultChunkCache = new ResultChunkCache(10)
|
||||
|
||||
beforeAll(async () => {
|
||||
storageRoot = await fs.mkdtemp('typescript-', { encoding: 'utf8' })
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const inputs: { input: Readable; repository: string; commit: string }[] = []
|
||||
|
||||
for (const repository of ['a', 'b1', 'b2', 'b3', 'c1', 'c2', 'c3']) {
|
||||
const input = fs
|
||||
.createReadStream(`./test-data/typescript/data/${repository}.lsif.gz`)
|
||||
.pipe(zlib.createGunzip())
|
||||
const commit = createCommit(repository)
|
||||
inputs.push({ input, repository, commit })
|
||||
}
|
||||
|
||||
for (const { input, repository, commit } of inputs) {
|
||||
await backend.insertDump(input, repository, commit)
|
||||
}
|
||||
})
|
||||
|
||||
afterAll(async () => await rmfr(storageRoot))
|
||||
|
||||
it('should find all defs of `add` from repo a', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('a', createCommit('a'))
|
||||
const definitions = await db.definitions('src/index.ts', { line: 11, character: 18 })
|
||||
expect(definitions).toContainEqual(createLocation('src/index.ts', 0, 16, 0, 19))
|
||||
expect(definitions && definitions.length).toEqual(1)
|
||||
})
|
||||
|
||||
it('should find all defs of `add` from repo b1', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('b1', createCommit('b1'))
|
||||
const definitions = await db.definitions('src/index.ts', { line: 3, character: 12 })
|
||||
expect(definitions).toContainEqual(createRemoteLocation('a', 'src/index.ts', 0, 16, 0, 19))
|
||||
expect(definitions && definitions.length).toEqual(1)
|
||||
})
|
||||
|
||||
it('should find all defs of `mul` from repo b1', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('b1', createCommit('b1'))
|
||||
const definitions = await db.definitions('src/index.ts', { line: 3, character: 16 })
|
||||
expect(definitions).toContainEqual(createRemoteLocation('a', 'src/index.ts', 4, 16, 4, 19))
|
||||
expect(definitions && definitions.length).toEqual(1)
|
||||
})
|
||||
|
||||
it('should find all refs of `mul` from repo a', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('a', createCommit('a'))
|
||||
// TODO - (FIXME) why are these garbage results in the index
|
||||
const references = (await db.references('src/index.ts', { line: 4, character: 19 }))!.filter(
|
||||
l => !l.uri.includes('node_modules')
|
||||
)
|
||||
|
||||
// TODO - should the definition be in this result set?
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 4, 16, 4, 19)) // def
|
||||
expect(references).toContainEqual(createRemoteLocation('b1', 'src/index.ts', 0, 14, 0, 17)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b1', 'src/index.ts', 3, 15, 3, 18)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b1', 'src/index.ts', 3, 26, 3, 29)) // 2nd use
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 0, 14, 0, 17)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 3, 15, 3, 18)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 3, 26, 3, 29)) // 2nd use
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 0, 14, 0, 17)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 3, 15, 3, 18)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 3, 26, 3, 29)) // 2nd use
|
||||
|
||||
// Ensure no additional references
|
||||
expect(references && references.length).toEqual(10)
|
||||
})
|
||||
|
||||
it('should find all refs of `mul` from repo b1', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('b1', createCommit('b1'))
|
||||
// TODO - (FIXME) why are these garbage results in the index
|
||||
const references = (await db.references('src/index.ts', { line: 3, character: 16 }))!.filter(
|
||||
l => !l.uri.includes('node_modules')
|
||||
)
|
||||
|
||||
// TODO - should the definition be in this result set?
|
||||
expect(references).toContainEqual(createRemoteLocation('a', 'src/index.ts', 4, 16, 4, 19)) // def
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 0, 14, 0, 17)) // import
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 3, 15, 3, 18)) // 1st use
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 3, 26, 3, 29)) // 2nd use
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 0, 14, 0, 17)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 3, 15, 3, 18)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 3, 26, 3, 29)) // 2nd use
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 0, 14, 0, 17)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 3, 15, 3, 18)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 3, 26, 3, 29)) // 2nd use
|
||||
|
||||
// Ensure no additional references
|
||||
expect(references && references.length).toEqual(10)
|
||||
})
|
||||
|
||||
it('should find all refs of `add` from repo a', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('a', createCommit('a'))
|
||||
// TODO - (FIXME) why are these garbage results in the index
|
||||
const references = (await db.references('src/index.ts', { line: 0, character: 17 }))!.filter(
|
||||
l => !l.uri.includes('node_modules')
|
||||
)
|
||||
|
||||
// TODO - should the definition be in this result set?
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 0, 16, 0, 19)) // def
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 11, 18, 11, 21)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b1', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b1', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('c1', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('c1', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('c1', 'src/index.ts', 3, 15, 3, 18)) // 2nd use
|
||||
expect(references).toContainEqual(createRemoteLocation('c1', 'src/index.ts', 3, 26, 3, 29)) // 3rd use
|
||||
expect(references).toContainEqual(createRemoteLocation('c2', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('c2', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('c2', 'src/index.ts', 3, 15, 3, 18)) // 2nd use
|
||||
expect(references).toContainEqual(createRemoteLocation('c2', 'src/index.ts', 3, 26, 3, 29)) // 3rd use
|
||||
expect(references).toContainEqual(createRemoteLocation('c3', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('c3', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('c3', 'src/index.ts', 3, 15, 3, 18)) // 2nd use
|
||||
expect(references).toContainEqual(createRemoteLocation('c3', 'src/index.ts', 3, 26, 3, 29)) // 3rd use
|
||||
|
||||
// Ensure no additional references
|
||||
expect(references && references.length).toEqual(20)
|
||||
})
|
||||
|
||||
it('should find all refs of `add` from repo c1', async () => {
|
||||
const backend = await createBackend(storageRoot, connectionCache, documentCache, resultChunkCache)
|
||||
const db = await backend.createDatabase('c1', createCommit('c1'))
|
||||
// TODO - (FIXME) why are these garbage results in the index
|
||||
const references = (await db.references('src/index.ts', { line: 3, character: 16 }))!.filter(
|
||||
l => !l.uri.includes('node_modules')
|
||||
)
|
||||
|
||||
// TODO - should the definition be in this result set?
|
||||
expect(references).toContainEqual(createRemoteLocation('a', 'src/index.ts', 0, 16, 0, 19)) // def
|
||||
expect(references).toContainEqual(createRemoteLocation('a', 'src/index.ts', 11, 18, 11, 21)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b1', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b1', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b2', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('b3', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 3, 15, 3, 18)) // 2nd use
|
||||
expect(references).toContainEqual(createLocation('src/index.ts', 3, 26, 3, 29)) // 3rd use
|
||||
expect(references).toContainEqual(createRemoteLocation('c2', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('c2', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('c2', 'src/index.ts', 3, 15, 3, 18)) // 2nd use
|
||||
expect(references).toContainEqual(createRemoteLocation('c2', 'src/index.ts', 3, 26, 3, 29)) // 3rd use
|
||||
expect(references).toContainEqual(createRemoteLocation('c3', 'src/index.ts', 0, 9, 0, 12)) // import
|
||||
expect(references).toContainEqual(createRemoteLocation('c3', 'src/index.ts', 3, 11, 3, 14)) // 1st use
|
||||
expect(references).toContainEqual(createRemoteLocation('c3', 'src/index.ts', 3, 15, 3, 18)) // 2nd use
|
||||
expect(references).toContainEqual(createRemoteLocation('c3', 'src/index.ts', 3, 26, 3, 29)) // 3rd use
|
||||
|
||||
// Ensure no additional references
|
||||
expect(references && references.length).toEqual(20)
|
||||
})
|
||||
})
|
||||
174
lsif/src/server.ts
Normal file
174
lsif/src/server.ts
Normal file
@ -0,0 +1,174 @@
|
||||
import bodyParser from 'body-parser'
|
||||
import express from 'express'
|
||||
import { ConnectionCache, DocumentCache, ResultChunkCache } from './cache'
|
||||
import { ERRNOLSIFDATA, createBackend } from './backend'
|
||||
import { hasErrorCode, readEnvInt } from './util'
|
||||
import { wrap } from 'async-middleware'
|
||||
import * as zlib from 'mz/zlib'
|
||||
|
||||
/**
|
||||
* Which port to run the LSIF server on. Defaults to 3186.
|
||||
*/
|
||||
const HTTP_PORT = readEnvInt('HTTP_PORT', 3186)
|
||||
|
||||
/**
|
||||
* The number of SQLite connections that can be opened at once. This
|
||||
* value may be exceeded for a short period if many handles are held
|
||||
* at once.
|
||||
*/
|
||||
const CONNECTION_CACHE_SIZE = readEnvInt('CONNECTION_CACHE_SIZE', 1000)
|
||||
|
||||
/**
|
||||
* The maximum number of documents that can be held in memory at once.
|
||||
*/
|
||||
const DOCUMENT_CACHE_SIZE = readEnvInt('DOCUMENT_CACHE_SIZE', 1000)
|
||||
|
||||
/**
|
||||
* The maximum number of result chunks that can be held in memory at once.
|
||||
*/
|
||||
const RESULT_CHUNK_CACHE_SIZE = readEnvInt('RESULT_CHUNK_CACHE_SIZE', 1000)
|
||||
|
||||
/**
|
||||
* Whether or not to log a message when the HTTP server is ready and listening.
|
||||
*/
|
||||
const LOG_READY = process.env.DEPLOY_TYPE === 'dev'
|
||||
|
||||
/**
|
||||
* Where on the file system to store LSIF files.
|
||||
*/
|
||||
const STORAGE_ROOT = process.env.LSIF_STORAGE_ROOT || 'lsif-storage'
|
||||
|
||||
/**
|
||||
* Runs the HTTP server which accepts LSIF dump uploads and responds to LSIF requests.
|
||||
*/
|
||||
async function main(): Promise<void> {
|
||||
const connectionCache = new ConnectionCache(CONNECTION_CACHE_SIZE)
|
||||
const documentCache = new DocumentCache(DOCUMENT_CACHE_SIZE)
|
||||
const resultChunkCache = new ResultChunkCache(RESULT_CHUNK_CACHE_SIZE)
|
||||
const backend = await createBackend(STORAGE_ROOT, connectionCache, documentCache, resultChunkCache)
|
||||
const app = express()
|
||||
app.use(errorHandler)
|
||||
|
||||
app.get('/ping', (_, res) => {
|
||||
res.send({ pong: 'pong' })
|
||||
})
|
||||
|
||||
app.post(
|
||||
'/upload',
|
||||
wrap(
|
||||
async (req: express.Request, res: express.Response, next: express.NextFunction): Promise<void> => {
|
||||
const { repository, commit } = req.query
|
||||
checkRepository(repository)
|
||||
checkCommit(commit)
|
||||
const input = req.pipe(zlib.createGunzip()).on('error', next)
|
||||
await backend.insertDump(input, repository, commit)
|
||||
res.json(null)
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
app.post(
|
||||
'/exists',
|
||||
wrap(
|
||||
async (req: express.Request, res: express.Response): Promise<void> => {
|
||||
const { repository, commit, file } = req.query
|
||||
checkRepository(repository)
|
||||
checkCommit(commit)
|
||||
|
||||
try {
|
||||
const db = await backend.createDatabase(repository, commit)
|
||||
const result = !file || (await db.exists(file))
|
||||
res.json(result)
|
||||
} catch (e) {
|
||||
if (hasErrorCode(e, ERRNOLSIFDATA)) {
|
||||
res.json(false)
|
||||
return
|
||||
}
|
||||
|
||||
throw e
|
||||
}
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
app.post(
|
||||
'/request',
|
||||
bodyParser.json({ limit: '1mb' }),
|
||||
wrap(
|
||||
async (req: express.Request, res: express.Response): Promise<void> => {
|
||||
const { repository, commit } = req.query
|
||||
const { path, position, method } = req.body
|
||||
checkRepository(repository)
|
||||
checkCommit(commit)
|
||||
checkMethod(method, ['definitions', 'references', 'hover'])
|
||||
const cleanMethod = method as 'definitions' | 'references' | 'hover'
|
||||
|
||||
try {
|
||||
const db = await backend.createDatabase(repository, commit)
|
||||
res.json(await db[cleanMethod](path, position))
|
||||
} catch (e) {
|
||||
if (hasErrorCode(e, ERRNOLSIFDATA)) {
|
||||
throw Object.assign(e, { status: 404 })
|
||||
}
|
||||
|
||||
throw e
|
||||
}
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
app.listen(HTTP_PORT, () => {
|
||||
if (LOG_READY) {
|
||||
console.log(`Listening for HTTP requests on port ${HTTP_PORT}`)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware function used to convert uncaught exceptions into 500 responses.
|
||||
*/
|
||||
function errorHandler(err: any, req: express.Request, res: express.Response, next: express.NextFunction): void {
|
||||
if (err && err.status) {
|
||||
res.status(err.status).send({ message: err.message })
|
||||
return
|
||||
}
|
||||
|
||||
console.error(err)
|
||||
res.status(500).send({ message: 'Unknown error' })
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws an error with status 400 if the repository string is invalid.
|
||||
*/
|
||||
export function checkRepository(repository: any): void {
|
||||
if (typeof repository !== 'string') {
|
||||
throw Object.assign(new Error('Must specify the repository (usually of the form github.com/user/repo)'), {
|
||||
status: 400,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws an error with status 400 if the commit string is invalid.
|
||||
*/
|
||||
export function checkCommit(commit: any): void {
|
||||
if (typeof commit !== 'string' || commit.length !== 40 || !/^[0-9a-f]+$/.test(commit)) {
|
||||
throw Object.assign(new Error('Must specify the commit as a 40 character hash ' + commit), { status: 400 })
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Throws an error with status 422 if the requested method is not supported.
|
||||
*/
|
||||
export function checkMethod(method: string, supportedMethods: string[]): void {
|
||||
if (!supportedMethods.includes(method)) {
|
||||
throw Object.assign(new Error(`Method must be one of ${Array.from(supportedMethods).join(', ')}`), {
|
||||
status: 422,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
main().catch(e => {
|
||||
console.error(e)
|
||||
setTimeout(() => process.exit(1), 100)
|
||||
})
|
||||
33
lsif/src/test-utils.ts
Normal file
33
lsif/src/test-utils.ts
Normal file
@ -0,0 +1,33 @@
|
||||
import { lsp } from 'lsif-protocol'
|
||||
|
||||
export function createLocation(
|
||||
uri: string,
|
||||
startLine: number,
|
||||
startCharacter: number,
|
||||
endLine: number,
|
||||
endCharacter: number
|
||||
): lsp.Location {
|
||||
return lsp.Location.create(uri, {
|
||||
start: { line: startLine, character: startCharacter },
|
||||
end: { line: endLine, character: endCharacter },
|
||||
})
|
||||
}
|
||||
|
||||
export function createRemoteLocation(
|
||||
repository: string,
|
||||
path: string,
|
||||
startLine: number,
|
||||
startCharacter: number,
|
||||
endLine: number,
|
||||
endCharacter: number
|
||||
): lsp.Location {
|
||||
const url = new URL(`git://${repository}`)
|
||||
url.search = createCommit(repository)
|
||||
url.hash = path
|
||||
|
||||
return createLocation(url.href, startLine, startCharacter, endLine, endCharacter)
|
||||
}
|
||||
|
||||
export function createCommit(repository: string): string {
|
||||
return repository.repeat(40).substring(0, 40)
|
||||
}
|
||||
13
lsif/src/util.test.ts
Normal file
13
lsif/src/util.test.ts
Normal file
@ -0,0 +1,13 @@
|
||||
import { mustGetFromEither } from './util'
|
||||
|
||||
describe('mustGetFromEither', () => {
|
||||
it('should return first defined value', () => {
|
||||
const map1 = new Map<string, string>()
|
||||
const map2 = new Map<string, string>()
|
||||
|
||||
map2.set('foo', 'baz')
|
||||
expect(mustGetFromEither(map1, map2, 'foo', '')).toEqual('baz')
|
||||
map1.set('foo', 'bar')
|
||||
expect(mustGetFromEither(map1, map2, 'foo', '')).toEqual('bar')
|
||||
})
|
||||
})
|
||||
107
lsif/src/util.ts
Normal file
107
lsif/src/util.ts
Normal file
@ -0,0 +1,107 @@
|
||||
import { Id } from 'lsif-protocol'
|
||||
import { DefinitionReferenceResultId } from './models.database'
|
||||
|
||||
/**
|
||||
* Reads an integer from an environment variable or defaults to the given value.
|
||||
*
|
||||
* @param key The environment variable name.
|
||||
* @param defaultValue The default value.
|
||||
*/
|
||||
export function readEnvInt(key: string, defaultValue: number): number {
|
||||
return (process.env[key] && parseInt(process.env[key] || '', 10)) || defaultValue
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine if an exception value has the given error code.
|
||||
*
|
||||
* @param e The exception value.
|
||||
* @param expectedCode The expected error code.
|
||||
*/
|
||||
export function hasErrorCode(e: any, expectedCode: string): boolean {
|
||||
return e && e.code === expectedCode
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the value of the given key from the given map. If the key does not
|
||||
* exist in the map, an exception is thrown with the given error text.
|
||||
*
|
||||
* @param map The map to query.
|
||||
* @param key The key to search for.
|
||||
* @param elementType The type of element (used for exception message).
|
||||
*/
|
||||
export function mustGet<K, V>(map: Map<K, V>, key: K, elementType: string): V {
|
||||
const value = map.get(key)
|
||||
if (value !== undefined) {
|
||||
return value
|
||||
}
|
||||
|
||||
throw new Error(`Unknown ${elementType} '${key}'.`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the value of the given key from one of the given maps. The first
|
||||
* non-undefined value to be found is returned. If the key does not exist in
|
||||
* either map, an exception is thrown with the given error text.
|
||||
*
|
||||
* @param map1 The first map to query.
|
||||
* @param map2 The second map to query.
|
||||
* @param key The key to search for.
|
||||
* @param elementType The type of element (used for exception message).
|
||||
*/
|
||||
export function mustGetFromEither<K, V>(map1: Map<K, V>, map2: Map<K, V>, key: K, elementType: string): V {
|
||||
for (const map of [map1, map2]) {
|
||||
const value = map.get(key)
|
||||
if (value !== undefined) {
|
||||
return value
|
||||
}
|
||||
}
|
||||
|
||||
throw new Error(`Unknown ${elementType} '${key}'.`)
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the value of `id`, or throw an exception if it is undefined.
|
||||
*
|
||||
* @param id The identifier.
|
||||
*/
|
||||
export function assertId<T extends Id>(id: T | undefined): T {
|
||||
if (id !== undefined) {
|
||||
return id
|
||||
}
|
||||
|
||||
throw new Error('id is undefined')
|
||||
}
|
||||
|
||||
/**
|
||||
* Hash a string or numeric identifier into the range `[0, maxIndex)`. The
|
||||
* hash algorithm here is similar to the one used in Java's String.hashCode.
|
||||
*
|
||||
* @param id The identifier to hash.
|
||||
* @param maxIndex The maximum of the range.
|
||||
*/
|
||||
export function hashKey(id: DefinitionReferenceResultId, maxIndex: number): number {
|
||||
const s = `${id}`
|
||||
|
||||
let hash = 0
|
||||
for (let i = 0; i < s.length; i++) {
|
||||
const chr = s.charCodeAt(i)
|
||||
hash = (hash << 5) - hash + chr
|
||||
hash |= 0
|
||||
}
|
||||
|
||||
// Hash value may be negative - must unset sign bit before modulus
|
||||
return Math.abs(hash) % maxIndex
|
||||
}
|
||||
|
||||
/**
|
||||
* Determine the table inserter batch size for an entity given the number of
|
||||
* fields inserted for that entity. We cannot perform an insert operation with
|
||||
* more than 999 placeholder variables, so we need to flush our batch before
|
||||
* we reach that amount. If fields are added to the models, the argument to
|
||||
* this function also needs to change.
|
||||
*
|
||||
* @param numFields The number of fields for an entity.
|
||||
*/
|
||||
export function getBatchSize(numFields: number): number {
|
||||
return Math.floor(999 / numFields)
|
||||
}
|
||||
210
lsif/src/xrepo.ts
Normal file
210
lsif/src/xrepo.ts
Normal file
@ -0,0 +1,210 @@
|
||||
import { Connection, EntityManager } from 'typeorm'
|
||||
import { testFilter, createFilter } from './encoding'
|
||||
import { ConnectionCache } from './cache'
|
||||
import { ReferenceModel, PackageModel } from './models.xrepo'
|
||||
import { TableInserter } from './inserter'
|
||||
|
||||
/**
|
||||
* Represents a package provided by a project or a package that is a dependency
|
||||
* of a project, depending on its use.
|
||||
*/
|
||||
export interface Package {
|
||||
/**
|
||||
* The scheme of the package (e.g. npm, pip).
|
||||
*/
|
||||
scheme: string
|
||||
|
||||
/**
|
||||
* The name of the package.
|
||||
*/
|
||||
name: string
|
||||
|
||||
/**
|
||||
* The version of the package.
|
||||
*/
|
||||
version: string | null
|
||||
}
|
||||
|
||||
/**
|
||||
* Represents a use of a set of symbols from a particular dependent package of
|
||||
* a project.
|
||||
*/
|
||||
export interface SymbolReferences {
|
||||
/**
|
||||
* The package from which the symbols are imported.
|
||||
*/
|
||||
package: Package
|
||||
|
||||
/**
|
||||
* The unique identifiers of the symbols imported from the package.
|
||||
*/
|
||||
identifiers: string[]
|
||||
}
|
||||
|
||||
/**
|
||||
* A wrapper around a SQLite database that stitches together the references
|
||||
* between projects at a specific commit. This is used for cross-repository
|
||||
* jump to definition and find references features.
|
||||
*/
|
||||
export class XrepoDatabase {
|
||||
/**
|
||||
* Create a new ` XrepoDatabase` backed by the given database filename.
|
||||
*
|
||||
* @param connectionCache The cache of SQLite connections.
|
||||
* @param database The filename of the database.
|
||||
*/
|
||||
constructor(private connectionCache: ConnectionCache, private database: string) {}
|
||||
|
||||
/**
|
||||
* Find the package that defines the given `scheme`, `name`, and `version`.
|
||||
*
|
||||
* @param scheme The package manager scheme (e.g. npm, pip).
|
||||
* @param name The package name.
|
||||
* @param version The package version.
|
||||
*/
|
||||
public async getPackage(scheme: string, name: string, version: string | null): Promise<PackageModel | undefined> {
|
||||
return await this.withConnection(connection =>
|
||||
connection.getRepository(PackageModel).findOne({
|
||||
where: {
|
||||
scheme,
|
||||
name,
|
||||
version,
|
||||
},
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Correlate a `repository` and `commit` with a set of unique packages.
|
||||
*
|
||||
* @param repository The repository that defines the given package.
|
||||
* @param commit The commit of the that defines the given package.
|
||||
* @param packages The package list (scheme, name, and version).
|
||||
*/
|
||||
public async addPackages(repository: string, commit: string, packages: Package[]): Promise<void> {
|
||||
return await this.withTransactionalEntityManager(async entityManager => {
|
||||
// We replace on conflict here: the first LSIF upload to provide a package will be
|
||||
// the repository and commit used in cross-repository jump-to-definition queries.
|
||||
|
||||
const inserter = new TableInserter(entityManager, PackageModel, PackageModel.BatchSize, true)
|
||||
for (const pkg of packages) {
|
||||
await inserter.insert({ repository, commit, ...pkg })
|
||||
}
|
||||
|
||||
await inserter.flush()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all repository/commit pairs that reference `value` in the given package. The
|
||||
* returned results will include only repositories that have a dependency on the given
|
||||
* package. The returned results may (but is not likely to) include a repository/commit
|
||||
* pair that does not reference `value`. See cache.ts for configuration values that tune
|
||||
* the bloom filter false positive rates.
|
||||
*
|
||||
* @param scheme The package manager scheme (e.g. npm, pip).
|
||||
* @param name The package name.
|
||||
* @param version The package version.
|
||||
* @param value The value to test.
|
||||
*/
|
||||
public async getReferences({
|
||||
scheme,
|
||||
name,
|
||||
version,
|
||||
value,
|
||||
}: {
|
||||
scheme: string
|
||||
name: string
|
||||
version: string | null
|
||||
value: string
|
||||
}): Promise<ReferenceModel[]> {
|
||||
const results = await this.withConnection(connection =>
|
||||
connection.getRepository(ReferenceModel).find({
|
||||
where: {
|
||||
scheme,
|
||||
name,
|
||||
version,
|
||||
},
|
||||
})
|
||||
)
|
||||
|
||||
// Test the bloom filter of each reference model concurrently
|
||||
const keepFlags = await Promise.all(results.map(result => testFilter(result.filter, value)))
|
||||
|
||||
// Drop any result that did not pass bloom filter
|
||||
return results.filter((_, i) => keepFlags[i])
|
||||
}
|
||||
|
||||
/**
|
||||
* Correlate the given `repository` and `commit` with the the names referenced from a
|
||||
* particular remote package.
|
||||
*
|
||||
* @param repository The repository that depends on the given package.
|
||||
* @param commit The commit that depends on the given package.
|
||||
* @param references The package data (scheme, name, and version) and the symbols that the package references.
|
||||
*/
|
||||
public async addReferences(repository: string, commit: string, references: SymbolReferences[]): Promise<void> {
|
||||
return await this.withTransactionalEntityManager(async entityManager => {
|
||||
const inserter = new TableInserter(entityManager, ReferenceModel, ReferenceModel.BatchSize)
|
||||
for (const reference of references) {
|
||||
await inserter.insert({
|
||||
repository,
|
||||
commit,
|
||||
filter: await createFilter(reference.identifiers),
|
||||
...reference.package,
|
||||
})
|
||||
}
|
||||
|
||||
await inserter.flush()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove references to the given repository and commit from both packages and
|
||||
* references table.
|
||||
*
|
||||
* @param repository The repository.
|
||||
* @param commit The commit.
|
||||
*/
|
||||
public async clearCommit(repository: string, commit: string): Promise<void> {
|
||||
return await this.withTransactionalEntityManager(async entityManager => {
|
||||
await entityManager
|
||||
.createQueryBuilder()
|
||||
.delete()
|
||||
.from(PackageModel)
|
||||
.where({ repository, commit })
|
||||
.execute()
|
||||
|
||||
await entityManager
|
||||
.createQueryBuilder()
|
||||
.delete()
|
||||
.from(ReferenceModel)
|
||||
.where({ repository, commit })
|
||||
.execute()
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoke `callback` with a SQLite connection object obtained from the
|
||||
* cache or created on cache miss.
|
||||
*
|
||||
* @param callback The function invoke with the SQLite connection.
|
||||
*/
|
||||
private async withConnection<T>(callback: (connection: Connection) => Promise<T>): Promise<T> {
|
||||
return await this.connectionCache.withConnection(this.database, [PackageModel, ReferenceModel], callback)
|
||||
}
|
||||
|
||||
/**
|
||||
* Invoke `callback` with a transactional SQLite manager manager object
|
||||
* obtained from the cache or created on cache miss.
|
||||
*
|
||||
* @param callback The function invoke with the entity manager.
|
||||
*/
|
||||
private async withTransactionalEntityManager<T>(callback: (connection: EntityManager) => Promise<T>): Promise<T> {
|
||||
return await this.connectionCache.withTransactionalEntityManager(
|
||||
this.database,
|
||||
[PackageModel, ReferenceModel],
|
||||
callback
|
||||
)
|
||||
}
|
||||
}
|
||||
40
lsif/test-data/cpp/README.md
Normal file
40
lsif/test-data/cpp/README.md
Normal file
@ -0,0 +1,40 @@
|
||||
# LSIF test data for C++ via DXR Plugin
|
||||
|
||||
Getting the DXR plugin to work with macOS is an ongoing project. For now, we just version control some output. The reference source is given below.
|
||||
|
||||
### main.cpp
|
||||
|
||||
```
|
||||
#include "five.h"
|
||||
|
||||
#define TABLE_SIZE 100
|
||||
|
||||
int x = TABLE_SIZE;
|
||||
|
||||
int four(int y) {
|
||||
return y;
|
||||
}
|
||||
|
||||
int main() {
|
||||
five(x);
|
||||
four(x);
|
||||
five(six);
|
||||
}
|
||||
```
|
||||
|
||||
### five.h
|
||||
|
||||
```cpp
|
||||
#include "five.h"
|
||||
|
||||
int five(int x) {
|
||||
return 5;
|
||||
}
|
||||
```
|
||||
|
||||
### five.cpp
|
||||
|
||||
```cpp
|
||||
int six;
|
||||
int five(int x);
|
||||
```
|
||||
BIN
lsif/test-data/cpp/data/data.lsif.gz
Normal file
BIN
lsif/test-data/cpp/data/data.lsif.gz
Normal file
Binary file not shown.
23
lsif/test-data/typescript/README.md
Normal file
23
lsif/test-data/typescript/README.md
Normal file
@ -0,0 +1,23 @@
|
||||
# LSIF test data for TypeScript
|
||||
|
||||
The `./bin/generate.sh` to create LSIF dumps for a set of TypeScript projects that reference each other. This will create seven repositories and gzipped LSIF dump files which is used in the tests found in `query-typescript.test.ts`.
|
||||
|
||||
The dump files used for testing are under version control, but can be regenerated to test changes in the indexer utilities.
|
||||
|
||||
### Dump Layout
|
||||
|
||||
The repository `a` defines the `math-util` package containing functions `add` and `mul`. The latter function is defined in terms of the former (and thus contains a eference to it).
|
||||
|
||||
The repositories `b{1,2,3}` have a dependency on `math-util` and import both `add` and `mul` functions.
|
||||
|
||||
The repositories `c{1,2,3}` have a dependency on `math-util` and import only the `add` function.
|
||||
|
||||
The TypeScript source for each project is contained in the script that generates the project, `./bin/generate-{a,b,c}.sh`.
|
||||
|
||||
### Requirements
|
||||
|
||||
This script requires you install [`lsif-tsc` and `lsif-npm`](https://github.com/microsoft/lsif-node). The `lsif-tsc` tool can be installed via npm. Unfortunately, Microsoft's implementation of `lsif-npm` is currently broken (but fixed with this [pull request](https://github.com/microsoft/lsif-node/pull/66)). The script must, for now, be run with Sourcegraph's fork of [lsif-npm](https://github.com/sourcegraph/lsif-node), which contains the update from the pull request. The location of the `lsif-npm` binary can be switched as follows.
|
||||
|
||||
```bash
|
||||
LSIF_NPM=~/path/to/lsif-npm ./bin/generate.sh`
|
||||
```
|
||||
47
lsif/test-data/typescript/bin/generate-a.sh
Executable file
47
lsif/test-data/typescript/bin/generate-a.sh
Executable file
@ -0,0 +1,47 @@
|
||||
#!/bin/bash -u
|
||||
|
||||
mkdir -p "${DIR}/${REPO}/src"
|
||||
|
||||
cat << EOF > "${DIR}/${REPO}/src/index.ts"
|
||||
export function add(a: number, b: number): number {
|
||||
return a + b
|
||||
}
|
||||
|
||||
export function mul(a: number, b: number): number {
|
||||
if (b === 0) {
|
||||
return 0
|
||||
}
|
||||
|
||||
let product = a
|
||||
for (let i = 0; i < b; i++) {
|
||||
product = add(product, a)
|
||||
}
|
||||
|
||||
return product
|
||||
}
|
||||
EOF
|
||||
|
||||
cat << EOF > "${DIR}/${REPO}/package.json"
|
||||
{
|
||||
"name": "math-util",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0",
|
||||
"dependencies": {},
|
||||
"scripts": {
|
||||
"build": "tsc"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
cat << EOF > "${DIR}/${REPO}/tsconfig.json"
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "node",
|
||||
"typeRoots": []
|
||||
},
|
||||
"include": ["src/*"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
EOF
|
||||
40
lsif/test-data/typescript/bin/generate-b.sh
Executable file
40
lsif/test-data/typescript/bin/generate-b.sh
Executable file
@ -0,0 +1,40 @@
|
||||
#!/bin/bash -u
|
||||
|
||||
mkdir -p "${DIR}/${REPO}/src"
|
||||
|
||||
cat << EOF > "${DIR}/${REPO}/src/index.ts"
|
||||
import { add, mul } from 'math-util/src'
|
||||
|
||||
export function foobar(a: number, b: number): number {
|
||||
return add(mul(a, b), mul(b, a))
|
||||
}
|
||||
EOF
|
||||
|
||||
cat << EOF > "${DIR}/${REPO}/package.json"
|
||||
{
|
||||
"name": "${REPO}",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0",
|
||||
"dependencies": {
|
||||
"math-util": "link:${DEP}"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
cat << EOF > "${DIR}/${REPO}/tsconfig.json"
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "node",
|
||||
"typeRoots": []
|
||||
},
|
||||
"include": ["src/*"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
EOF
|
||||
|
||||
yarn --cwd "${DIR}/${REPO}" > /dev/null
|
||||
40
lsif/test-data/typescript/bin/generate-c.sh
Executable file
40
lsif/test-data/typescript/bin/generate-c.sh
Executable file
@ -0,0 +1,40 @@
|
||||
#!/bin/bash -u
|
||||
|
||||
mkdir -p "${DIR}/${REPO}/src"
|
||||
|
||||
cat << EOF > "${DIR}/${REPO}/src/index.ts"
|
||||
import { add } from 'math-util/src'
|
||||
|
||||
export function foobar(a: number, b: number): number {
|
||||
return add(add(a, b), add(b, a))
|
||||
}
|
||||
EOF
|
||||
|
||||
cat << EOF > "${DIR}/${REPO}/package.json"
|
||||
{
|
||||
"name": "${REPO}",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0",
|
||||
"dependencies": {
|
||||
"math-util": "link:${DEP}"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc"
|
||||
}
|
||||
}
|
||||
EOF
|
||||
|
||||
cat << EOF > "${DIR}/${REPO}/tsconfig.json"
|
||||
{
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"target": "esnext",
|
||||
"moduleResolution": "node",
|
||||
"typeRoots": []
|
||||
},
|
||||
"include": ["src/*"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
EOF
|
||||
|
||||
yarn --cwd "${DIR}/${REPO}" > /dev/null
|
||||
24
lsif/test-data/typescript/bin/generate.sh
Executable file
24
lsif/test-data/typescript/bin/generate.sh
Executable file
@ -0,0 +1,24 @@
|
||||
#!/bin/bash
|
||||
|
||||
LSIF_TSC=${LSIF_TSC:-`which lsif-tsc`}
|
||||
LSIF_NPM=${LSIF_NPM:-`which lsif-npm`}
|
||||
|
||||
trap "{ rm -r ./repos; }" EXIT
|
||||
|
||||
DIR=./repos REPO=a ./bin/generate-a.sh
|
||||
DIR=./repos REPO=b1 DEP=`pwd`/repos/a ./bin/generate-b.sh
|
||||
DIR=./repos REPO=b2 DEP=`pwd`/repos/a ./bin/generate-b.sh
|
||||
DIR=./repos REPO=b3 DEP=`pwd`/repos/a ./bin/generate-b.sh
|
||||
DIR=./repos REPO=c1 DEP=`pwd`/repos/a ./bin/generate-c.sh
|
||||
DIR=./repos REPO=c2 DEP=`pwd`/repos/a ./bin/generate-c.sh
|
||||
DIR=./repos REPO=c3 DEP=`pwd`/repos/a ./bin/generate-c.sh
|
||||
|
||||
mkdir -p data
|
||||
|
||||
for repo in `ls repos`; do
|
||||
cd "./repos/${repo}"
|
||||
${LSIF_TSC} -p tsconfig.json --noContents --stdout | ${LSIF_NPM} --stdin --out "../../data/${repo}.lsif"
|
||||
cd - > /dev/null
|
||||
done
|
||||
|
||||
gzip ./data/*.lsif
|
||||
BIN
lsif/test-data/typescript/data/a.lsif.gz
Normal file
BIN
lsif/test-data/typescript/data/a.lsif.gz
Normal file
Binary file not shown.
BIN
lsif/test-data/typescript/data/b1.lsif.gz
Normal file
BIN
lsif/test-data/typescript/data/b1.lsif.gz
Normal file
Binary file not shown.
BIN
lsif/test-data/typescript/data/b2.lsif.gz
Normal file
BIN
lsif/test-data/typescript/data/b2.lsif.gz
Normal file
Binary file not shown.
BIN
lsif/test-data/typescript/data/b3.lsif.gz
Normal file
BIN
lsif/test-data/typescript/data/b3.lsif.gz
Normal file
Binary file not shown.
BIN
lsif/test-data/typescript/data/c1.lsif.gz
Normal file
BIN
lsif/test-data/typescript/data/c1.lsif.gz
Normal file
Binary file not shown.
BIN
lsif/test-data/typescript/data/c2.lsif.gz
Normal file
BIN
lsif/test-data/typescript/data/c2.lsif.gz
Normal file
Binary file not shown.
BIN
lsif/test-data/typescript/data/c3.lsif.gz
Normal file
BIN
lsif/test-data/typescript/data/c3.lsif.gz
Normal file
Binary file not shown.
25
lsif/tsconfig.json
Normal file
25
lsif/tsconfig.json
Normal file
@ -0,0 +1,25 @@
|
||||
{
|
||||
"extends": "@sourcegraph/tsconfig",
|
||||
"compilerOptions": {
|
||||
"allowJs": false,
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"esModuleInterop": true,
|
||||
"experimentalDecorators": true,
|
||||
"importHelpers": true,
|
||||
"lib": ["esnext", "dom", "dom.iterable"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noErrorTruncation": true,
|
||||
"noImplicitAny": true,
|
||||
"noImplicitReturns": true,
|
||||
"outDir": "out",
|
||||
"skipLibCheck": true,
|
||||
"inlineSourceMap": true,
|
||||
"strict": true,
|
||||
"target": "esnext",
|
||||
},
|
||||
"files": [],
|
||||
"include": ["src"],
|
||||
"exclude": ["node_modules"],
|
||||
}
|
||||
6
lsif/tslint.json
Normal file
6
lsif/tslint.json
Normal file
@ -0,0 +1,6 @@
|
||||
{
|
||||
"extends": ["../tslint.config.js"],
|
||||
"linterOptions": {
|
||||
"exclude": ["node_modules/**"]
|
||||
}
|
||||
}
|
||||
12
lsif/upload.sh
Normal file → Executable file
12
lsif/upload.sh
Normal file → Executable file
@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
urlencode() {
|
||||
echo "$1" | curl -Gso /dev/null -w %{url_effective} --data-urlencode @- "" | cut -c 3- | sed -e 's/%0A//'
|
||||
echo "$1" | curl -Gso /dev/null -w %{url_effective} --data-urlencode @- "" | cut -c 3- | sed -e 's/%0A//'
|
||||
}
|
||||
|
||||
file="$1"
|
||||
@ -18,11 +18,11 @@ usage() {
|
||||
}
|
||||
|
||||
if [[ -z "$SRC_LSIF_UPLOAD_TOKEN" || -z "$REPOSITORY" || -z "$COMMIT" || -z "$file" ]]; then
|
||||
usage
|
||||
exit 1
|
||||
usage
|
||||
exit 1
|
||||
fi
|
||||
|
||||
curl \
|
||||
-H "Content-Type: application/x-ndjson+lsif" \
|
||||
"$SRC_ENDPOINT/.api/lsif/upload?repository=$(urlencode "$REPOSITORY")&commit=$(urlencode "$COMMIT")&upload_token=$(urlencode "$SRC_LSIF_UPLOAD_TOKEN")" \
|
||||
--data-binary "@$file"
|
||||
-H "Content-Type: application/x-ndjson+lsif" \
|
||||
"$SRC_ENDPOINT/.api/lsif/upload?repository=$(urlencode "$REPOSITORY")&commit=$(urlencode "$COMMIT")&upload_token=$(urlencode "$SRC_LSIF_UPLOAD_TOKEN")" \
|
||||
--data-binary "@$file"
|
||||
|
||||
4568
lsif/yarn.lock
Normal file
4568
lsif/yarn.lock
Normal file
File diff suppressed because it is too large
Load Diff
@ -49,6 +49,7 @@
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.5.5",
|
||||
"@babel/plugin-proposal-class-properties": "^7.5.5",
|
||||
"@babel/plugin-proposal-decorators": "^7.4.4",
|
||||
"@babel/plugin-syntax-dynamic-import": "^7.2.0",
|
||||
"@babel/preset-env": "^7.5.5",
|
||||
"@babel/preset-react": "^7.0.0",
|
||||
|
||||
11
yarn.lock
11
yarn.lock
@ -99,7 +99,7 @@
|
||||
"@babel/traverse" "^7.4.4"
|
||||
"@babel/types" "^7.4.4"
|
||||
|
||||
"@babel/helper-create-class-features-plugin@^7.4.0", "@babel/helper-create-class-features-plugin@^7.5.5":
|
||||
"@babel/helper-create-class-features-plugin@^7.4.0", "@babel/helper-create-class-features-plugin@^7.4.4", "@babel/helper-create-class-features-plugin@^7.5.5":
|
||||
version "7.5.5"
|
||||
resolved "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.5.5.tgz#401f302c8ddbc0edd36f7c6b2887d8fa1122e5a4"
|
||||
integrity sha512-ZsxkyYiRA7Bg+ZTRpPvB6AbOFKTFFK4LrvTet8lInm0V468MWCaSYJE+I7v2z2r8KNLtYiV+K5kTCnR7dvyZjg==
|
||||
@ -299,6 +299,15 @@
|
||||
"@babel/helper-plugin-utils" "^7.0.0"
|
||||
"@babel/plugin-syntax-decorators" "^7.2.0"
|
||||
|
||||
"@babel/plugin-proposal-decorators@^7.4.4":
|
||||
version "7.4.4"
|
||||
resolved "https://registry.npmjs.org/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.4.4.tgz#de9b2a1a8ab0196f378e2a82f10b6e2a36f21cc0"
|
||||
integrity sha512-z7MpQz3XC/iQJWXH9y+MaWcLPNSMY9RQSthrLzak8R8hCj0fuyNk+Dzi9kfNe/JxxlWQ2g7wkABbgWjW36MTcw==
|
||||
dependencies:
|
||||
"@babel/helper-create-class-features-plugin" "^7.4.4"
|
||||
"@babel/helper-plugin-utils" "^7.0.0"
|
||||
"@babel/plugin-syntax-decorators" "^7.2.0"
|
||||
|
||||
"@babel/plugin-proposal-dynamic-import@^7.5.0":
|
||||
version "7.5.0"
|
||||
resolved "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.5.0.tgz#e532202db4838723691b10a67b8ce509e397c506"
|
||||
|
||||
Loading…
Reference in New Issue
Block a user