diff --git a/server/CHANGELOG.md b/server/CHANGELOG.md index 71d019d9b..97dd5248c 100644 --- a/server/CHANGELOG.md +++ b/server/CHANGELOG.md @@ -1,5 +1,9 @@ # Bash Language Server +## 4.1.0 + +- Symbols across files are now only included based on sourced files (using non dynamic statements like `source file.sh` or `. ~/file.inc`) instead of including symbols from all files in the workspace. We now also support jump-to-definition on the file path used in a source command. The new behavior can be disabled by turning on the `includeAllWorkspaceSymbols` configuration option. https://github.com/bash-lsp/bash-language-server/pull/244 + ## 4.0.1 - **Breaking**: Drop support for Node 12, which reached its official end of life on April 30th 2022. Doing so enables new features. https://github.com/bash-lsp/bash-language-server/pull/584 diff --git a/server/package.json b/server/package.json index 8f5fb21c4..21911c8b7 100644 --- a/server/package.json +++ b/server/package.json @@ -3,7 +3,7 @@ "description": "A language server for Bash", "author": "Mads Hartmann", "license": "MIT", - "version": "4.0.1", + "version": "4.1.0", "publisher": "mads-hartmann", "main": "./out/server.js", "typings": "./out/server.d.ts", diff --git a/server/src/__tests__/__snapshots__/analyzer.test.ts.snap b/server/src/__tests__/__snapshots__/analyzer.test.ts.snap index 5d2e25cb9..3b3671146 100644 --- a/server/src/__tests__/__snapshots__/analyzer.test.ts.snap +++ b/server/src/__tests__/__snapshots__/analyzer.test.ts.snap @@ -38,24 +38,6 @@ Array [ ] `; -exports[`findDefinition returns a list of locations if parameter is found 1`] = ` -Array [ - Object { - "range": Object { - "end": Object { - "character": 37, - "line": 148, - }, - "start": Object { - "character": 0, - "line": 148, - }, - }, - "uri": "dummy-uri.sh", - }, -] -`; - exports[`findReferences returns a list of locations if parameter is found 1`] = ` Array [ Object { diff --git a/server/src/__tests__/analyzer.test.ts b/server/src/__tests__/analyzer.test.ts index 19d45e4c5..230f41a0d 100644 --- a/server/src/__tests__/analyzer.test.ts +++ b/server/src/__tests__/analyzer.test.ts @@ -1,4 +1,9 @@ -import FIXTURES, { FIXTURE_FOLDER } from '../../../testing/fixtures' +import { + FIXTURE_DOCUMENT, + FIXTURE_FOLDER, + FIXTURE_URI, + REPO_ROOT_FOLDER, +} from '../../../testing/fixtures' import { getMockConnection } from '../../../testing/mocks' import Analyzer from '../analyser' import { getDefaultConfiguration } from '../config' @@ -17,23 +22,36 @@ const defaultConfig = getDefaultConfiguration() beforeAll(async () => { const parser = await initializeParser() - analyzer = new Analyzer({ console: mockConsole, parser }) + analyzer = new Analyzer({ + console: mockConsole, + parser, + workspaceFolder: FIXTURE_FOLDER, + }) }) describe('analyze', () => { it('returns an empty list of errors for a file with no parsing errors', () => { - const result = analyzer.analyze(CURRENT_URI, FIXTURES.INSTALL) + const result = analyzer.analyze({ + uri: CURRENT_URI, + document: FIXTURE_DOCUMENT.INSTALL, + }) expect(result).toEqual([]) }) it('returns a list of errors for a file with a missing node', () => { - const result = analyzer.analyze(CURRENT_URI, FIXTURES.MISSING_NODE) + const result = analyzer.analyze({ + uri: CURRENT_URI, + document: FIXTURE_DOCUMENT.MISSING_NODE, + }) expect(result).not.toEqual([]) expect(result).toMatchSnapshot() }) it('returns a list of errors for a file with parsing errors', () => { - const result = analyzer.analyze(CURRENT_URI, FIXTURES.PARSE_PROBLEMS) + const result = 
analyzer.analyze({ + uri: CURRENT_URI, + document: FIXTURE_DOCUMENT.PARSE_PROBLEMS, + }) expect(result).not.toEqual([]) expect(result).toMatchSnapshot() }) @@ -41,28 +59,112 @@ describe('analyze', () => { describe('findDefinition', () => { it('returns an empty list if word is not found', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.INSTALL) - const result = analyzer.findDefinition({ word: 'foobar' }) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.INSTALL }) + const result = analyzer.findDefinition({ uri: CURRENT_URI, word: 'foobar' }) expect(result).toEqual([]) }) + it('returns a location to a file if word is the path in a sourcing statement', () => { + const document = FIXTURE_DOCUMENT.SOURCING + const { uri } = document + analyzer.analyze({ uri, document }) + const result = analyzer.findDefinition({ + uri, + word: './extension.inc', + position: { character: 10, line: 2 }, + }) + expect(result).toMatchInlineSnapshot(` + Array [ + Object { + "range": Object { + "end": Object { + "character": 0, + "line": 0, + }, + "start": Object { + "character": 0, + "line": 0, + }, + }, + "uri": "file://${FIXTURE_FOLDER}extension.inc", + }, + ] + `) + }) + + it('returns a location to a file if word is the path in a sourcing statement (resolved by using the workspacePath)', async () => { + const document = FIXTURE_DOCUMENT.SOURCING + const { uri } = document + + const parser = await initializeParser() + const connection = getMockConnection() + const newAnalyzer = new Analyzer({ + console: connection.console, + parser, + workspaceFolder: REPO_ROOT_FOLDER, + }) + + newAnalyzer.analyze({ uri, document }) + const result = newAnalyzer.findDefinition({ + uri, + word: './scripts/tag-release.inc', + position: { character: 10, line: 16 }, + }) + expect(result).toMatchInlineSnapshot(` + Array [ + Object { + "range": Object { + "end": Object { + "character": 0, + "line": 0, + }, + "start": Object { + "character": 0, + "line": 0, + }, + }, + "uri": "file://${REPO_ROOT_FOLDER}/scripts/tag-release.inc", + }, + ] + `) + }) + it('returns a list of locations if parameter is found', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.INSTALL) - const result = analyzer.findDefinition({ word: 'node_version' }) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.INSTALL }) + const result = analyzer.findDefinition({ + uri: CURRENT_URI, + word: 'node_version', + }) expect(result).not.toEqual([]) - expect(result).toMatchSnapshot() + expect(result).toMatchInlineSnapshot(` + Array [ + Object { + "range": Object { + "end": Object { + "character": 37, + "line": 148, + }, + "start": Object { + "character": 0, + "line": 148, + }, + }, + "uri": "dummy-uri.sh", + }, + ] + `) }) }) describe('findReferences', () => { it('returns empty list if parameter is not found', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.INSTALL) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.INSTALL }) const result = analyzer.findReferences('foobar') expect(result).toEqual([]) }) it('returns a list of locations if parameter is found', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.INSTALL) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.INSTALL }) const result = analyzer.findReferences('node_version') expect(result).not.toEqual([]) expect(result).toMatchSnapshot() @@ -71,29 +173,85 @@ describe('findReferences', () => { describe('findSymbolsForFile', () => { it('returns empty list if uri is not found', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.INSTALL) + analyzer.analyze({ uri: 
CURRENT_URI, document: FIXTURE_DOCUMENT.INSTALL }) const result = analyzer.findSymbolsForFile({ uri: 'foobar.sh' }) expect(result).toEqual([]) }) it('returns a list of SymbolInformation if uri is found', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.INSTALL) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.INSTALL }) const result = analyzer.findSymbolsForFile({ uri: CURRENT_URI }) expect(result).not.toEqual([]) expect(result).toMatchSnapshot() }) it('issue 101', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.ISSUE101) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.ISSUE101 }) const result = analyzer.findSymbolsForFile({ uri: CURRENT_URI }) expect(result).not.toEqual([]) expect(result).toMatchSnapshot() }) }) +describe('findAllSourcedUris', () => { + it('returns references to sourced files', async () => { + const parser = await initializeParser() + const connection = getMockConnection() + + const newAnalyzer = new Analyzer({ + console: connection.console, + parser, + workspaceFolder: REPO_ROOT_FOLDER, + }) + await newAnalyzer.initiateBackgroundAnalysis({ + backgroundAnalysisMaxFiles: defaultConfig.backgroundAnalysisMaxFiles, + globPattern: defaultConfig.globPattern, + }) + + const result = newAnalyzer.findAllSourcedUris({ uri: FIXTURE_URI.SOURCING }) + expect(result).toEqual( + new Set([ + `file://${REPO_ROOT_FOLDER}/scripts/tag-release.inc`, // resolved based on repoRootFolder + `file://${FIXTURE_FOLDER}issue101.sh`, // resolved based on current file + `file://${FIXTURE_FOLDER}extension.inc`, // resolved based on current file + ]), + ) + }) + + it('returns references to sourced files without file extension', async () => { + const parser = await initializeParser() + const connection = getMockConnection() + + const newAnalyzer = new Analyzer({ + console: connection.console, + parser, + workspaceFolder: FIXTURE_FOLDER, + }) + await newAnalyzer.initiateBackgroundAnalysis({ + backgroundAnalysisMaxFiles: defaultConfig.backgroundAnalysisMaxFiles, + globPattern: defaultConfig.globPattern, + }) + + // Parse the file without extension + newAnalyzer.analyze({ + uri: FIXTURE_URI.MISSING_EXTENSION, + document: FIXTURE_DOCUMENT.MISSING_EXTENSION, + }) + + const result = newAnalyzer.findAllSourcedUris({ uri: FIXTURE_URI.MISSING_EXTENSION }) + expect(result).toEqual( + new Set([ + `file://${FIXTURE_FOLDER}extension.inc`, + `file://${FIXTURE_FOLDER}issue101.sh`, + `file://${FIXTURE_FOLDER}sourcing.sh`, + ]), + ) + }) +}) + describe('wordAtPoint', () => { it('returns current word at a given point', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.INSTALL) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.INSTALL }) expect(analyzer.wordAtPoint(CURRENT_URI, 25, 0)).toEqual(null) expect(analyzer.wordAtPoint(CURRENT_URI, 25, 1)).toEqual(null) expect(analyzer.wordAtPoint(CURRENT_URI, 25, 2)).toEqual(null) @@ -121,7 +279,7 @@ describe('wordAtPoint', () => { describe('commandNameAtPoint', () => { it('returns current command name at a given point', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.INSTALL) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.INSTALL }) expect(analyzer.commandNameAtPoint(CURRENT_URI, 15, 0)).toEqual(null) expect(analyzer.commandNameAtPoint(CURRENT_URI, 20, 2)).toEqual('curl') @@ -137,13 +295,28 @@ describe('commandNameAtPoint', () => { }) }) -describe('findSymbolCompletions', () => { - it('return a list of symbols across the workspace', () => { - analyzer.analyze('install.sh', FIXTURES.INSTALL) - 
analyzer.analyze('sourcing-sh', FIXTURES.SOURCING) +describe('findSymbolsMatchingWord', () => { + it('return a list of symbols across the workspace when includeAllWorkspaceSymbols is true', async () => { + const parser = await initializeParser() + const connection = getMockConnection() + + const analyzer = new Analyzer({ + console: connection.console, + parser, + includeAllWorkspaceSymbols: true, + workspaceFolder: FIXTURE_FOLDER, + }) + await analyzer.initiateBackgroundAnalysis({ + backgroundAnalysisMaxFiles: defaultConfig.backgroundAnalysisMaxFiles, + globPattern: defaultConfig.globPattern, + }) expect( - analyzer.findSymbolsMatchingWord({ word: 'npm_config_logl', exactMatch: false }), + analyzer.findSymbolsMatchingWord({ + word: 'npm_config_logl', + uri: FIXTURE_URI.INSTALL, + exactMatch: false, + }), ).toMatchInlineSnapshot(` Array [ Object { @@ -159,7 +332,7 @@ describe('findSymbolCompletions', () => { "line": 40, }, }, - "uri": "dummy-uri.sh", + "uri": "file://${FIXTURE_FOLDER}install.sh", }, "name": "npm_config_loglevel", }, @@ -176,93 +349,167 @@ describe('findSymbolCompletions', () => { "line": 48, }, }, - "uri": "dummy-uri.sh", + "uri": "file://${FIXTURE_FOLDER}install.sh", }, "name": "npm_config_loglevel", }, + ] + `) + + expect( + analyzer.findSymbolsMatchingWord({ + word: 'xxxxxxxx', + uri: FIXTURE_URI.INSTALL, + exactMatch: false, + }), + ).toMatchInlineSnapshot(`Array []`) + + expect( + analyzer.findSymbolsMatchingWord({ + word: 'BLU', + uri: FIXTURE_URI.INSTALL, + exactMatch: false, + }), + ).toMatchInlineSnapshot(` + Array [ Object { "kind": 13, "location": Object { "range": Object { "end": Object { - "character": 27, - "line": 40, + "character": 19, + "line": 6, }, "start": Object { "character": 0, - "line": 40, + "line": 6, }, }, - "uri": "install.sh", + "uri": "file://${FIXTURE_FOLDER}extension.inc", }, - "name": "npm_config_loglevel", + "name": "BLUE", }, + ] + `) + + expect( + analyzer.findSymbolsMatchingWord({ + word: 'BLU', + uri: FIXTURE_URI.SOURCING, + exactMatch: false, + }), + ).toMatchInlineSnapshot(` + Array [ Object { "kind": 13, "location": Object { "range": Object { "end": Object { - "character": 31, - "line": 48, + "character": 19, + "line": 6, }, "start": Object { - "character": 2, - "line": 48, + "character": 0, + "line": 6, }, }, - "uri": "install.sh", + "uri": "file://${FIXTURE_FOLDER}extension.inc", }, - "name": "npm_config_loglevel", + "name": "BLUE", }, ] `) + }) + + it('return a list of symbols accessible to the uri when includeAllWorkspaceSymbols is false', async () => { + const parser = await initializeParser() + const connection = getMockConnection() + + const analyzer = new Analyzer({ + console: connection.console, + parser, + includeAllWorkspaceSymbols: false, + workspaceFolder: FIXTURE_FOLDER, + }) + await analyzer.initiateBackgroundAnalysis({ + backgroundAnalysisMaxFiles: defaultConfig.backgroundAnalysisMaxFiles, + globPattern: defaultConfig.globPattern, + }) expect( - analyzer.findSymbolsMatchingWord({ word: 'xxxxxxxx', exactMatch: false }), + analyzer.findSymbolsMatchingWord({ + word: 'BLU', + uri: FIXTURE_URI.INSTALL, + exactMatch: false, + }), ).toMatchInlineSnapshot(`Array []`) expect( - analyzer.findSymbolsMatchingWord({ word: 'BLU', exactMatch: false }), - ).toMatchInlineSnapshot(`Array []`) + analyzer.findSymbolsMatchingWord({ + word: 'BLU', + uri: FIXTURE_URI.SOURCING, + exactMatch: false, + }), + ).toMatchInlineSnapshot(` + Array [ + Object { + "kind": 13, + "location": Object { + "range": Object { + "end": Object { + "character": 
19, + "line": 6, + }, + "start": Object { + "character": 0, + "line": 6, + }, + }, + "uri": "file://${FIXTURE_FOLDER}extension.inc", + }, + "name": "BLUE", + }, + ] + `) }) }) describe('commentsAbove', () => { it('returns a string of a comment block above a line', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.COMMENT_DOC) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.COMMENT_DOC }) expect(analyzer.commentsAbove(CURRENT_URI, 22)).toEqual( '```txt\ndoc for func_one\n```', ) }) it('handles line breaks in comments', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.COMMENT_DOC) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.COMMENT_DOC }) expect(analyzer.commentsAbove(CURRENT_URI, 28)).toEqual( '```txt\ndoc for func_two\nhas two lines\n```', ) }) it('only returns connected comments', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.COMMENT_DOC) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.COMMENT_DOC }) expect(analyzer.commentsAbove(CURRENT_URI, 36)).toEqual( '```txt\ndoc for func_three\n```', ) }) it('returns null if no comment found', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.COMMENT_DOC) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.COMMENT_DOC }) expect(analyzer.commentsAbove(CURRENT_URI, 45)).toEqual(null) }) it('works for variables', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.COMMENT_DOC) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.COMMENT_DOC }) expect(analyzer.commentsAbove(CURRENT_URI, 42)).toEqual( '```txt\nworks for variables\n```', ) }) it('returns connected comments with empty comment line', () => { - analyzer.analyze(CURRENT_URI, FIXTURES.COMMENT_DOC) + analyzer.analyze({ uri: CURRENT_URI, document: FIXTURE_DOCUMENT.COMMENT_DOC }) expect(analyzer.commentsAbove(CURRENT_URI, 51)).toEqual( '```txt\nthis is also included\n\ndoc for func_four\n```', ) @@ -277,11 +524,14 @@ describe('initiateBackgroundAnalysis', () => { const connection = getMockConnection() - const newAnalyzer = new Analyzer({ console: connection.console, parser }) + const newAnalyzer = new Analyzer({ + console: connection.console, + parser, + workspaceFolder: FIXTURE_FOLDER, + }) const { filesParsed } = await newAnalyzer.initiateBackgroundAnalysis({ backgroundAnalysisMaxFiles: defaultConfig.backgroundAnalysisMaxFiles, globPattern: defaultConfig.globPattern, - rootPath: FIXTURE_FOLDER, }) expect(connection.window.showWarningMessage).not.toHaveBeenCalled() @@ -305,11 +555,14 @@ describe('initiateBackgroundAnalysis', () => { const connection = getMockConnection() - const newAnalyzer = new Analyzer({ console: connection.console, parser }) + const newAnalyzer = new Analyzer({ + console: connection.console, + parser, + workspaceFolder: FIXTURE_FOLDER, + }) const { filesParsed } = await newAnalyzer.initiateBackgroundAnalysis({ backgroundAnalysisMaxFiles: defaultConfig.backgroundAnalysisMaxFiles, globPattern: defaultConfig.globPattern, - rootPath: FIXTURE_FOLDER, }) expect(connection.window.showWarningMessage).not.toHaveBeenCalled() @@ -324,11 +577,14 @@ describe('initiateBackgroundAnalysis', () => { const connection = getMockConnection() - const newAnalyzer = new Analyzer({ console: connection.console, parser }) + const newAnalyzer = new Analyzer({ + console: connection.console, + parser, + workspaceFolder: FIXTURE_FOLDER, + }) const { filesParsed } = await newAnalyzer.initiateBackgroundAnalysis({ backgroundAnalysisMaxFiles: 0, globPattern: defaultConfig.globPattern, - rootPath: FIXTURE_FOLDER, }) 
expect(connection.window.showWarningMessage).not.toHaveBeenCalled() @@ -337,3 +593,147 @@ describe('initiateBackgroundAnalysis', () => { expect(filesParsed).toEqual(0) }) }) + +describe('getAllVariableSymbols', () => { + it('returns all variable symbols', async () => { + const document = FIXTURE_DOCUMENT.SOURCING + const { uri } = document + + const parser = await initializeParser() + const connection = getMockConnection() + + const newAnalyzer = new Analyzer({ + console: connection.console, + parser, + workspaceFolder: REPO_ROOT_FOLDER, + }) + // NOTE: no background analysis + + newAnalyzer.analyze({ uri, document }) + + expect(newAnalyzer.getAllVariableSymbols({ uri })).toMatchInlineSnapshot(` + Array [ + Object { + "kind": 13, + "location": Object { + "range": Object { + "end": Object { + "character": 16, + "line": 10, + }, + "start": Object { + "character": 0, + "line": 10, + }, + }, + "uri": "file://${FIXTURE_FOLDER}sourcing.sh", + }, + "name": "BOLD", + }, + Object { + "kind": 13, + "location": Object { + "range": Object { + "end": Object { + "character": 18, + "line": 4, + }, + "start": Object { + "character": 0, + "line": 4, + }, + }, + "uri": "file://${FIXTURE_FOLDER}extension.inc", + }, + "name": "RED", + }, + Object { + "kind": 13, + "location": Object { + "range": Object { + "end": Object { + "character": 20, + "line": 5, + }, + "start": Object { + "character": 0, + "line": 5, + }, + }, + "uri": "file://${FIXTURE_FOLDER}extension.inc", + }, + "name": "GREEN", + }, + Object { + "kind": 13, + "location": Object { + "range": Object { + "end": Object { + "character": 19, + "line": 6, + }, + "start": Object { + "character": 0, + "line": 6, + }, + }, + "uri": "file://${FIXTURE_FOLDER}extension.inc", + }, + "name": "BLUE", + }, + Object { + "kind": 13, + "location": Object { + "range": Object { + "end": Object { + "character": 16, + "line": 7, + }, + "start": Object { + "character": 0, + "line": 7, + }, + }, + "uri": "file://${FIXTURE_FOLDER}extension.inc", + }, + "name": "BOLD", + }, + Object { + "kind": 13, + "location": Object { + "range": Object { + "end": Object { + "character": 17, + "line": 8, + }, + "start": Object { + "character": 0, + "line": 8, + }, + }, + "uri": "file://${FIXTURE_FOLDER}extension.inc", + }, + "name": "RESET", + }, + Object { + "containerName": "tagRelease", + "kind": 13, + "location": Object { + "range": Object { + "end": Object { + "character": 8, + "line": 5, + }, + "start": Object { + "character": 2, + "line": 5, + }, + }, + "uri": "file://${REPO_ROOT_FOLDER}/scripts/tag-release.inc", + }, + "name": "tag", + }, + ] + `) + }) +}) diff --git a/server/src/__tests__/config.test.ts b/server/src/__tests__/config.test.ts index 938a853a8..82be2125c 100644 --- a/server/src/__tests__/config.test.ts +++ b/server/src/__tests__/config.test.ts @@ -1,23 +1,27 @@ import { ConfigSchema, getConfigFromEnvironmentVariables } from '../config' describe('ConfigSchema', () => { - it('parses an object', () => { + it('returns a default', () => { expect(ConfigSchema.parse({})).toMatchInlineSnapshot(` Object { "backgroundAnalysisMaxFiles": 500, "explainshellEndpoint": "", "globPattern": "**/*@(.sh|.inc|.bash|.command)", "highlightParsingErrors": false, + "includeAllWorkspaceSymbols": false, "shellcheckArguments": Array [], "shellcheckPath": "shellcheck", } `) + }) + it('parses an object', () => { expect( ConfigSchema.parse({ backgroundAnalysisMaxFiles: 1, explainshellEndpoint: 'localhost:8080', globPattern: '**/*@(.sh)', highlightParsingErrors: true, + includeAllWorkspaceSymbols: 
true, shellcheckArguments: ' -e SC2001 -e SC2002 ', shellcheckPath: '', }), @@ -27,6 +31,7 @@ describe('ConfigSchema', () => { "explainshellEndpoint": "localhost:8080", "globPattern": "**/*@(.sh)", "highlightParsingErrors": true, + "includeAllWorkspaceSymbols": true, "shellcheckArguments": Array [ "-e", "SC2001", diff --git a/server/src/__tests__/server.test.ts b/server/src/__tests__/server.test.ts index 33aef8c4d..d55a98b8d 100644 --- a/server/src/__tests__/server.test.ts +++ b/server/src/__tests__/server.test.ts @@ -3,18 +3,25 @@ import * as Path from 'path' import * as LSP from 'vscode-languageserver/node' import { CodeAction } from 'vscode-languageserver/node' -import FIXTURE_DOCUMENT, { FIXTURE_FOLDER, FIXTURE_URI } from '../../../testing/fixtures' +import { + FIXTURE_DOCUMENT, + FIXTURE_FOLDER, + FIXTURE_URI, + REPO_ROOT_FOLDER, +} from '../../../testing/fixtures' import { getMockConnection } from '../../../testing/mocks' import LspServer from '../server' import { CompletionItemDataType } from '../types' -async function initializeServer() { +async function initializeServer( + { rootPath }: { rootPath?: string } = { rootPath: FIXTURE_FOLDER }, +) { const diagnostics: Array<LSP.PublishDiagnosticsParams | undefined> = [] const connection = getMockConnection() const server = await LspServer.initialize(connection, { - rootPath: FIXTURE_FOLDER, + rootPath, rootUri: null, processId: 42, capabilities: {} as any, @@ -102,10 +109,94 @@ describe('server', () => { ) expect(result).toBeDefined() - expect(result).toEqual({ - contents: - '### Function: **hello_world** - *defined on line 8*\n\n```txt\nthis is a comment\ndescribing the function\nhello_world\nthis function takes two arguments\n```', - }) + expect(result).toMatchInlineSnapshot(` + Object { + "contents": Object { + "kind": "markdown", + "value": "Function: **hello_world** - *defined on line 8* + + \`\`\`txt + this is a comment + describing the function + hello_world + this function takes two arguments + \`\`\`", + }, + } + `) + }) + + it('responds to onDefinition', async () => { + const { connection } = await initializeServer() + + const onDefinition = connection.onDefinition.mock.calls[0][0] + + const result = await onDefinition( + { + textDocument: { + uri: FIXTURE_URI.SOURCING, + }, + position: { character: 10, line: 2 }, + }, + {} as any, + {} as any, + ) + + expect(result).toMatchInlineSnapshot(` + Array [ + Object { + "range": Object { + "end": Object { + "character": 0, + "line": 0, + }, + "start": Object { + "character": 0, + "line": 0, + }, + }, + "uri": "file://${FIXTURE_FOLDER}extension.inc", + }, + ] + `) + }) + + it('responds to onDocumentSymbol', async () => { + const { connection } = await initializeServer() + + const onDocumentSymbol = connection.onDocumentSymbol.mock.calls[0][0] + + const result = await onDocumentSymbol( + { + textDocument: { + uri: FIXTURE_URI.SOURCING, + }, + }, + {} as any, + {} as any, + ) + + expect(result).toMatchInlineSnapshot(` + Array [ + Object { + "kind": 13, + "location": Object { + "range": Object { + "end": Object { + "character": 16, + "line": 10, + }, + "start": Object { + "character": 0, + "line": 10, + }, + }, + "uri": "file://${FIXTURE_FOLDER}sourcing.sh", + }, + "name": "BOLD", + }, + ] + `) }) it('responds to onDocumentHighlight', async () => { @@ -411,7 +502,10 @@ describe('server', () => { "name": "BLUE", "type": 3, }, - "documentation": "### Variable: **BLUE** - *defined in ../extension.inc*", + "documentation": Object { + "kind": "markdown", + "value": "Variable: **BLUE** - 
*defined in extension.inc*", + }, "kind": 6, "label": "BLUE", }, @@ -440,11 +534,14 @@ describe('server', () => { "name": "add_a_user", "type": 3, }, - "documentation": "### Function: **add_a_user** - *defined in ../issue101.sh* + "documentation": Object { + "kind": "markdown", + "value": "Function: **add_a_user** - *defined in issue101.sh* \`\`\`txt Helper function to add a user \`\`\`", + }, "kind": 3, "label": "add_a_user", }, @@ -488,7 +585,7 @@ describe('server', () => { }) it('responds to onCompletion with all variables when starting to expand parameters', async () => { - const { connection } = await initializeServer() + const { connection } = await initializeServer({ rootPath: REPO_ROOT_FOLDER }) const onCompletion = connection.onCompletion.mock.calls[0][0] @@ -511,6 +608,119 @@ describe('server', () => { expect(Array.from(new Set(result.map((item: any) => item.kind)))).toEqual([ LSP.CompletionItemKind.Variable, ]) + expect(result).toMatchInlineSnapshot(` + Array [ + Object { + "data": Object { + "name": "BOLD", + "type": 3, + }, + "documentation": undefined, + "kind": 6, + "label": "BOLD", + }, + Object { + "data": Object { + "name": "RED", + "type": 3, + }, + "documentation": Object { + "kind": "markdown", + "value": "Variable: **RED** - *defined in extension.inc*", + }, + "kind": 6, + "label": "RED", + }, + Object { + "data": Object { + "name": "GREEN", + "type": 3, + }, + "documentation": Object { + "kind": "markdown", + "value": "Variable: **GREEN** - *defined in extension.inc*", + }, + "kind": 6, + "label": "GREEN", + }, + Object { + "data": Object { + "name": "BLUE", + "type": 3, + }, + "documentation": Object { + "kind": "markdown", + "value": "Variable: **BLUE** - *defined in extension.inc*", + }, + "kind": 6, + "label": "BLUE", + }, + Object { + "data": Object { + "name": "RESET", + "type": 3, + }, + "documentation": Object { + "kind": "markdown", + "value": "Variable: **RESET** - *defined in extension.inc*", + }, + "kind": 6, + "label": "RESET", + }, + Object { + "data": Object { + "name": "USER", + "type": 3, + }, + "documentation": Object { + "kind": "markdown", + "value": "Variable: **USER** - *defined in issue101.sh*", + }, + "kind": 6, + "label": "USER", + }, + Object { + "data": Object { + "name": "PASSWORD", + "type": 3, + }, + "documentation": Object { + "kind": "markdown", + "value": "Variable: **PASSWORD** - *defined in issue101.sh*", + }, + "kind": 6, + "label": "PASSWORD", + }, + Object { + "data": Object { + "name": "COMMENTS", + "type": 3, + }, + "documentation": Object { + "kind": "markdown", + "value": "Variable: **COMMENTS** - *defined in issue101.sh* + + \`\`\`txt + Having shifted twice, the rest is now comments ... 
+ \`\`\`", + }, + "kind": 6, + "label": "COMMENTS", + }, + Object { + "data": Object { + "name": "tag", + "type": 3, + }, + "documentation": Object { + "kind": "markdown", + "value": "Variable: **tag** - *defined in ../../scripts/tag-release.inc*", + }, + "kind": 6, + "label": "tag", + }, + ] + `) }) it('responds to onCodeAction', async () => { diff --git a/server/src/analyser.ts b/server/src/analyser.ts index f2e9f2098..150448dc3 100644 --- a/server/src/analyser.ts +++ b/server/src/analyser.ts @@ -11,6 +11,7 @@ import * as Parser from 'web-tree-sitter' import { flattenArray, flattenObjectValues } from './util/flatten' import { getFilePaths } from './util/fs' import { analyzeShebang } from './util/shebang' +import * as sourcing from './util/sourcing' import * as TreeSitterUtil from './util/tree-sitter' const readFileAsync = promisify(fs.readFile) @@ -36,6 +37,7 @@ export default class Analyzer { // We need this to find the word at a given point etc. private uriToFileContent: Texts = {} private uriToDeclarations: FileDeclarations = {} + private uriToSourcedUris: { [uri: string]: Set<string> } = {} private treeSitterTypeToLSPKind: Kinds = { // These keys are using underscores as that's the naming convention in tree-sitter. environment_variable_assignment: LSP.SymbolKind.Variable, @@ -43,30 +45,45 @@ export default class Analyzer { variable_assignment: LSP.SymbolKind.Variable, } + private includeAllWorkspaceSymbols: boolean + private workspaceFolder: string | null + public constructor({ console, + includeAllWorkspaceSymbols = false, parser, + workspaceFolder, }: { console: LSP.RemoteConsole + includeAllWorkspaceSymbols?: boolean parser: Parser + workspaceFolder: string | null }) { - this.parser = parser this.console = console + this.includeAllWorkspaceSymbols = includeAllWorkspaceSymbols + this.parser = parser + this.workspaceFolder = workspaceFolder } /** - * Initiates a background analysis of the files in the given rootPath to + * Initiates a background analysis of the files in the workspaceFolder to * enable features across files. + * + * NOTE: if the source aware feature works well, we can likely remove this and + * rely on parsing files as they are sourced. */ public async initiateBackgroundAnalysis({ backgroundAnalysisMaxFiles, globPattern, - rootPath, }: { backgroundAnalysisMaxFiles: number globPattern: string - rootPath: string }): Promise<{ filesParsed: number }> { + const rootPath = this.workspaceFolder + if (!rootPath) { + return { filesParsed: 0 } + } + if (backgroundAnalysisMaxFiles <= 0) { this.console.log( `BackgroundAnalysis: skipping as backgroundAnalysisMaxFiles was 0...`, @@ -115,7 +132,10 @@ export default class Analyzer { continue } - this.analyze(uri, TextDocument.create(uri, 'shell', 1, fileContent)) + this.analyze({ + document: TextDocument.create(uri, 'shell', 1, fileContent), + uri, + }) } catch (error) { const errorMessage = error instanceof Error ? error.message : error this.console.warn( @@ -133,13 +153,39 @@ export default class Analyzer { /** * Find all the locations where something has been defined. 
*/ - public findDefinition({ word }: { word: string }): LSP.Location[] { - const symbols: LSP.SymbolInformation[] = [] - Object.keys(this.uriToDeclarations).forEach((uri) => { - const declarationNames = this.uriToDeclarations[uri][word] || [] - declarationNames.forEach((d) => symbols.push(d)) - }) - return symbols.map((s) => s.location) + public findDefinition({ + position, + uri, + word, + }: { + position?: { line: number; character: number } + uri: string + word: string + }): LSP.Location[] { + const tree = this.uriToTreeSitterTrees[uri] + if (position && tree) { + // NOTE: when a word is a file path to a sourced file, we return a location to it. + const sourcedLocation = sourcing.getSourcedLocation({ + position, + rootPath: this.workspaceFolder, + tree, + uri, + word, + }) + if (sourcedLocation) { + return [sourcedLocation] + } + } + + const fileDeclarations = this.getReachableUriToDeclarations({ uri }) + + return Object.keys(fileDeclarations) + .reduce((symbols, uri) => { + const declarationNames = fileDeclarations[uri][word] || [] + declarationNames.forEach((d) => symbols.push(d)) + return symbols + }, [] as LSP.SymbolInformation[]) + .map((symbol) => symbol.location) } /** @@ -261,29 +307,29 @@ export default class Analyzer { /** * Find symbol completions for the given word. - * - * TODO: if a file is not included we probably shouldn't include it declarations from it. */ public findSymbolsMatchingWord({ exactMatch, + uri, word, }: { exactMatch: boolean + uri: string word: string }): LSP.SymbolInformation[] { - const symbols: LSP.SymbolInformation[] = [] + const fileDeclarations = this.getReachableUriToDeclarations({ uri }) - Object.keys(this.uriToDeclarations).forEach((uri) => { - const declarationsInFile = this.uriToDeclarations[uri] || {} + return Object.keys(fileDeclarations).reduce((symbols, uri) => { + const declarationsInFile = fileDeclarations[uri] Object.keys(declarationsInFile).map((name) => { const match = exactMatch ? name === word : name.startsWith(word) if (match) { declarationsInFile[name].forEach((symbol) => symbols.push(symbol)) } }) - }) - return symbols + return symbols + }, [] as LSP.SymbolInformation[]) } /** @@ -293,18 +339,33 @@ export default class Analyzer { * Returns all, if any, syntax errors that occurred while parsing the file. * */ - public analyze(uri: string, document: TextDocument): LSP.Diagnostic[] { + public analyze({ + document, + uri, // NOTE: we don't use document.uri to make testing easier + }: { + document: TextDocument + uri: string + }): LSP.Diagnostic[] { const contents = document.getText() const tree = this.parser.parse(contents) + // TODO: would be nicer to save one map from uri to object containing all + // these fields.
this.uriToTextDocument[uri] = document this.uriToTreeSitterTrees[uri] = tree this.uriToDeclarations[uri] = {} this.uriToFileContent[uri] = contents + this.uriToSourcedUris[uri] = sourcing.getSourcedUris({ + fileContent: contents, + fileUri: uri, + rootPath: this.workspaceFolder, + tree, + }) const problems: LSP.Diagnostic[] = [] + // TODO: move this somewhere TreeSitterUtil.forEach(tree.rootNode, (n: Parser.SyntaxNode) => { if (n.type === 'ERROR') { problems.push( @@ -369,6 +430,29 @@ export default class Analyzer { return problems } + public findAllSourcedUris({ uri }: { uri: string }): Set<string> { + const allSourcedUris = new Set<string>([]) + + const addSourcedFilesFromUri = (fromUri: string) => { + const sourcedUris = this.uriToSourcedUris[fromUri] + + if (!sourcedUris) { + return + } + + sourcedUris.forEach((sourcedUri) => { + if (!allSourcedUris.has(sourcedUri)) { + allSourcedUris.add(sourcedUri) + addSourcedFilesFromUri(sourcedUri) + } + }) + } + + addSourcedFilesFromUri(uri) + + return allSourcedUris + } + /** * Find the node at the given point. */ @@ -474,23 +558,57 @@ export default class Analyzer { return null } - public getAllVariableSymbols(): LSP.SymbolInformation[] { - return this.getAllSymbols().filter( + public getAllVariableSymbols({ uri }: { uri: string }): LSP.SymbolInformation[] { + return this.getAllSymbols({ uri }).filter( (symbol) => symbol.kind === LSP.SymbolKind.Variable, ) } - private getAllSymbols(): LSP.SymbolInformation[] { - // NOTE: this could be cached, it takes < 1 ms to generate for a project with 250 bash files... - const symbols: LSP.SymbolInformation[] = [] + public setIncludeAllWorkspaceSymbols(includeAllWorkspaceSymbols: boolean): void { + this.includeAllWorkspaceSymbols = includeAllWorkspaceSymbols + } + + private getReachableUriToDeclarations({ + uri: fromUri, + }: { uri?: string } = {}): FileDeclarations { + if (!fromUri || this.includeAllWorkspaceSymbols) { + return this.uriToDeclarations + } - Object.keys(this.uriToDeclarations).forEach((uri) => { - Object.keys(this.uriToDeclarations[uri]).forEach((name) => { + const uris = [fromUri, ...Array.from(this.findAllSourcedUris({ uri: fromUri }))] + + // for each uri (sourced and base file) we find the declarations + return uris.reduce((fileDeclarations, uri) => { + if (!this.uriToDeclarations[uri]) { + // Either the background analysis didn't run or the file is outside + // the workspace. Let us try to analyze the file. 
+ try { + const fileContent = fs.readFileSync(new URL(uri), 'utf8') + this.analyze({ + document: TextDocument.create(uri, 'shell', 1, fileContent), + uri, + }) + } catch (err) { + this.console.log(`Error while analyzing sourced file ${uri}: ${err}`) + } + } + + fileDeclarations[uri] = this.uriToDeclarations[uri] || {} + return fileDeclarations + }, {} as FileDeclarations) + } + + private getAllSymbols({ uri }: { uri?: string } = {}): LSP.SymbolInformation[] { + const fileDeclarations = this.getReachableUriToDeclarations({ uri }) + + return Object.keys(fileDeclarations).reduce((symbols, uri) => { + const declarationsInFile = fileDeclarations[uri] + Object.keys(declarationsInFile).forEach((name) => { const declarationNames = this.uriToDeclarations[uri][name] || [] declarationNames.forEach((d) => symbols.push(d)) }) - }) - return symbols + return symbols + }, [] as LSP.SymbolInformation[]) } } diff --git a/server/src/config.ts b/server/src/config.ts index 9e9beadee..9023e79ac 100644 --- a/server/src/config.ts +++ b/server/src/config.ts @@ -2,18 +2,23 @@ import { z } from 'zod' export const ConfigSchema = z .object({ + // Maximum number of files to analyze in the background. Set to 0 to disable background analysis. + backgroundAnalysisMaxFiles: z.number().int().min(0).default(500), + // Glob pattern for finding and parsing shell script files in the workspace. Used by the background analysis features across files. globPattern: z.string().trim().default('**/*@(.sh|.inc|.bash|.command)'), - // Controls if Treesitter parsing errors will be highlighted as problems. - highlightParsingErrors: z.boolean().default(false), - // Configure explainshell server endpoint in order to get hover documentation on flags and options. // And empty string will disable the feature. explainshellEndpoint: z.string().trim().default(''), - // Controls the executable used for ShellCheck linting information. An empty string will disable linting. - shellcheckPath: z.string().trim().default('shellcheck'), + // Controls if Treesitter parsing errors will be highlighted as problems. + highlightParsingErrors: z.boolean().default(false), + + // Controls how symbols (e.g. variables and functions) are included and used for completion and documentation. + // If false, then we only include symbols from sourced files (i.e. using non dynamic statements like 'source file.sh' or '. file.sh'). + // If true, then all symbols from the workspace are included. + includeAllWorkspaceSymbols: z.boolean().default(false), // Additional ShellCheck arguments. Note that we already add the following arguments: --shell, --format, --external-sources." shellcheckArguments: z @@ -31,8 +36,8 @@ export const ConfigSchema = z }, z.array(z.string())) .default([]), - // Maximum number of files to analyze in the background. Set to 0 to disable background analysis. - backgroundAnalysisMaxFiles: z.number().int().min(0).default(500), + // Controls the executable used for ShellCheck linting information. An empty string will disable linting. + shellcheckPath: z.string().trim().default('shellcheck'), }) .strict() @@ -42,18 +47,14 @@ export function getConfigFromEnvironmentVariables(): { config: z.infer<typeof ConfigSchema> environmentVariablesUsed: string[] } { - const { HIGHLIGHT_PARSING_ERRORS } = process.env - const rawConfig = { - globPattern: process.env.GLOB_PATTERN, - highlightParsingErrors: - typeof HIGHLIGHT_PARSING_ERRORS !== 'undefined' - ? 
toBoolean(HIGHLIGHT_PARSING_ERRORS) - : undefined, + backgroundAnalysisMaxFiles: process.env.BACKGROUND_ANALYSIS_MAX_FILES, explainshellEndpoint: process.env.EXPLAINSHELL_ENDPOINT, - shellcheckPath: process.env.SHELLCHECK_PATH, + globPattern: process.env.GLOB_PATTERN, + highlightParsingErrors: toBoolean(process.env.HIGHLIGHT_PARSING_ERRORS), + includeAllWorkspaceSymbols: toBoolean(process.env.INCLUDE_ALL_WORKSPACE_SYMBOLS), shellcheckArguments: process.env.SHELLCHECK_ARGUMENTS, - backgroundAnalysisMaxFiles: process.env.BACKGROUND_ANALYSIS_MAX_FILES, + shellcheckPath: process.env.SHELLCHECK_PATH, } const environmentVariablesUsed = Object.entries(rawConfig) @@ -69,4 +70,5 @@ export function getDefaultConfiguration(): z.infer<typeof ConfigSchema> { return ConfigSchema.parse({}) } -const toBoolean = (s: string): boolean => s === 'true' || s === '1' +const toBoolean = (s?: string): boolean | undefined => + typeof s !== 'undefined' ? s === 'true' || s === '1' : undefined diff --git a/server/src/server.ts b/server/src/server.ts index 45f0d113c..944cee957 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -1,5 +1,5 @@ -import * as Process from 'child_process' -import * as Path from 'path' +import { spawnSync } from 'child_process' +import * as path from 'path' import * as TurndownService from 'turndown' import { isDeepStrictEqual } from 'util' import * as LSP from 'vscode-languageserver/node' @@ -32,7 +32,7 @@ export default class BashServer { private documents: LSP.TextDocuments<TextDocument> = new LSP.TextDocuments(TextDocument) private executables: Executables private linter?: Linter - private workspaceFolder: string | null | undefined + private workspaceFolder: string | null private uriToCodeActions: { [uri: string]: CodeAction[] | undefined } = {} private constructor({ @@ -48,7 +48,7 @@ export default class BashServer { connection: LSP.Connection executables: Executables linter?: Linter - workspaceFolder: string | null | undefined + workspaceFolder: string | null }) { this.analyzer = analyzer this.clientCapabilities = capabilities @@ -69,14 +69,18 @@ export default class BashServer { ): // TODO: use workspaceFolders instead of rootPath Promise<BashServer> { const { PATH } = process.env - const workspaceFolder = rootUri || rootPath + const workspaceFolder = rootUri || rootPath || null if (!PATH) { throw new Error('Expected PATH environment variable to be set') } const parser = await initializeParser() - const analyzer = new Analyzer({ console: connection.console, parser }) + const analyzer = new Analyzer({ + console: connection.console, + parser, + workspaceFolder, + }) const executables = await Executables.fromPath(PATH) @@ -192,7 +196,6 @@ export default class BashServer { return this.analyzer.initiateBackgroundAnalysis({ globPattern: this.config.globPattern, backgroundAnalysisMaxFiles: this.config.backgroundAnalysisMaxFiles, - rootPath: workspaceFolder, }) } @@ -224,6 +227,10 @@ export default class BashServer { }) } + this.analyzer.setIncludeAllWorkspaceSymbols( + this.config.includeAllWorkspaceSymbols, + ) + return true } } catch (err) { @@ -243,7 +250,7 @@ export default class BashServer { let diagnostics: LSP.Diagnostic[] = [] // Load the tree for the modified contents into the analyzer: - const analyzeDiagnostics = this.analyzer.analyze(uri, document) + const analyzeDiagnostics = this.analyzer.analyze({ uri, document }) // Treesitter's diagnostics can be a bit inaccurate, so we only merge the // analyzer's diagnostics if the setting is enabled: if 
(this.config.highlightParsingErrors) { @@ -338,19 +345,24 @@ export default class BashServer { }: { symbol: LSP.SymbolInformation currentUri: string - }): string { + }): LSP.MarkupContent { const symbolUri = symbol.location.uri - const symbolStarLine = symbol.location.range.start.line + const symbolStartLine = symbol.location.range.start.line - const commentAboveSymbol = this.analyzer.commentsAbove(symbolUri, symbolStarLine) + const commentAboveSymbol = this.analyzer.commentsAbove(symbolUri, symbolStartLine) const symbolDocumentation = commentAboveSymbol ? `\n\n${commentAboveSymbol}` : '' - const hoverHeader = `### ${symbolKindToDescription(symbol.kind)}: **${symbol.name}**` + const hoverHeader = `${symbolKindToDescription(symbol.kind)}: **${symbol.name}**` const symbolLocation = symbolUri !== currentUri - ? `in ${Path.relative(currentUri, symbolUri)}` - : `on line ${symbolStarLine + 1}` + ? `in ${path.relative(path.dirname(currentUri), symbolUri)}` + : `on line ${symbolStartLine + 1}` + + // TODO: An improvement could be to show the symbol definition in the hover instead + // of the defined location – similar to how VSCode works for languages like TypeScript. - return `${hoverHeader} - *defined ${symbolLocation}*${symbolDocumentation}` + return getMarkdownContent( + `${hoverHeader} - *defined ${symbolLocation}*${symbolDocumentation}`, + ) } private getCompletionItemsForSymbols({ @@ -422,12 +434,13 @@ ) { const shellDocumentation = await getShellDocumentation({ word }) if (shellDocumentation) { - return { contents: getMarkdownContent(shellDocumentation) } + return { contents: getMarkdownContent(shellDocumentation, 'man') } } } else { const symbolDocumentation = deduplicateSymbols({ symbols: this.analyzer.findSymbolsMatchingWord({ exactMatch: true, + uri: currentUri, word, }), currentUri, @@ -452,7 +465,11 @@ if (!word) { return null } - return this.analyzer.findDefinition({ word }) + return this.analyzer.findDefinition({ + position: params.position, + uri: params.textDocument.uri, + word, + }) } private onDocumentSymbol(params: LSP.DocumentSymbolParams): LSP.SymbolInformation[] { @@ -541,9 +558,10 @@ ? [] : this.getCompletionItemsForSymbols({ symbols: shouldCompleteOnVariables - ? this.analyzer.getAllVariableSymbols() + ? this.analyzer.getAllVariableSymbols({ uri: currentUri }) : this.analyzer.findSymbolsMatchingWord({ exactMatch: false, + uri: currentUri, word, }), currentUri, @@ -677,7 +695,7 @@ return documentation ? { ...item, - documentation: getMarkdownContent(documentation), + documentation: getMarkdownContent(documentation, 'man'), } : item } catch (error) { @@ -780,18 +798,19 @@ function symbolKindToDescription(s: LSP.SymbolKind): string { } } -const getMarkdownContent = (documentation: string): LSP.MarkupContent => ({ - value: ['``` man', documentation, '```'].join('\n'), - // Passed as markdown for syntax highlighting - kind: 'markdown' as const, -}) +function getMarkdownContent(documentation: string, language?: string): LSP.MarkupContent { + return { + value: language + ? // eslint-disable-next-line prefer-template + ['``` ' + language, documentation, '```'].join('\n') + : documentation, + kind: LSP.MarkupKind.Markdown, + } +} function getCommandOptions(name: string, word: string): string[] { // TODO: The options could be cached.
- const options = Process.spawnSync(Path.join(__dirname, '../src/get-options.sh'), [ - name, - word, - ]) + const options = spawnSync(path.join(__dirname, '../src/get-options.sh'), [name, word]) if (options.status !== 0) { return [] diff --git a/server/src/util/__tests__/sourcing.test.ts b/server/src/util/__tests__/sourcing.test.ts new file mode 100644 index 000000000..4d2e62fa8 --- /dev/null +++ b/server/src/util/__tests__/sourcing.test.ts @@ -0,0 +1,94 @@ +import * as fs from 'fs' +import * as os from 'os' +import * as Parser from 'web-tree-sitter' + +import { initializeParser } from '../../parser' +import { getSourcedUris } from '../sourcing' + +const fileDirectory = '/Users/bash' +const fileUri = `${fileDirectory}/file.sh` + +let parser: Parser +beforeAll(async () => { + parser = await initializeParser() +}) + +jest.spyOn(fs, 'existsSync').mockImplementation(() => true) + +// mock os.homedir() to return a fixed path +jest.spyOn(os, 'homedir').mockImplementation(() => '/Users/bash-user') + +describe('getSourcedUris', () => { + it('returns an empty set if no files were sourced', () => { + const fileContent = '' + const result = getSourcedUris({ + fileContent, + fileUri, + rootPath: null, + tree: parser.parse(fileContent), + }) + expect(result).toEqual(new Set([])) + }) + + it('returns a set of sourced files', () => { + const fileContent = ` + source file-in-path.sh # does not contain a slash (i.e. is maybe somewhere on the path) + + source /bin/extension.inc # absolute path + + source ./x a b c # some arguments + + . ../scripts/release-client.sh + + source ~/myscript + + # source ... + + source "./issue206.sh" # quoted file in fixtures folder + + source "$LIBPATH" # dynamic imports not supported + + # conditional is currently not supported + if [[ -z $__COMPLETION_LIB_LOADED ]]; then source "$LIBPATH" ; fi + + show () + { + about 'Shows SVN proxy settings' + group 'proxy' + + echo "SVN Proxy Settings" + echo "==================" + python2 - <<END + import ConfigParser + source ./this-should-be-ignored.sh + END + } + + cat $f | python -c ' + import sys + source also-ignore.sh + ' | sort > $counts_file + fi + done + + ` + + const result = getSourcedUris({ + fileContent, + fileUri, + rootPath: null, + tree: parser.parse(fileContent), + }) + + expect(result).toMatchInlineSnapshot(` + Set { + "file:///Users/bash/file-in-path.sh", + "file:///bin/extension.inc", + "file:///Users/bash/x", + "file:///Users/scripts/release-client.sh", + "file:///Users/bash-user/myscript", + "file:///Users/bash/issue206.sh", + } + `) + }) +}) diff --git a/server/src/util/sourcing.ts b/server/src/util/sourcing.ts new file mode 100644 index 000000000..ff1266d10 --- /dev/null +++ b/server/src/util/sourcing.ts @@ -0,0 +1,159 @@ +import * as fs from 'fs' +import * as path from 'path' +import * as LSP from 'vscode-languageserver' +import * as Parser from 'web-tree-sitter' + +import { untildify } from './fs' + +// Until the grammar supports sourcing, we use this little regular expression +const SOURCING_STATEMENTS = /^(?:\t|[ ])*(?:source|[.])\s*(\S*)/ +const SOURCING_COMMANDS = ['source', '.'] + +/** + * Analysis the given file content and returns a set of URIs that are + * sourced. Note that the URIs are resolved. 
+ */ +export function getSourcedUris({ + fileContent, + fileUri, + rootPath, + tree, +}: { + fileContent: string + fileUri: string + rootPath: string | null + tree: Parser.Tree +}): Set<string> { + const uris: Set<string> = new Set([]) + const rootPaths = [path.dirname(fileUri), rootPath].filter(Boolean) as string[] + + fileContent.split(/\r?\n/).forEach((line, lineIndex) => { + const match = line.match(SOURCING_STATEMENTS) + if (match) { + const [statement, word] = match + + if (tree.rootNode) { + const node = tree.rootNode.descendantForPosition({ + row: lineIndex, + column: statement.length - 2, + }) + if (['heredoc_body', 'raw_string'].includes(node?.type)) { + return + } + } + + const sourcedUri = getSourcedUri({ rootPaths, word }) + if (sourcedUri) { + uris.add(sourcedUri) + } + } + }) + + return uris +} + +/** + * Investigates if the given position is a path to a sourced file and maps it + * to a location. Useful for jump to definition. + * @returns an optional location + */ +export function getSourcedLocation({ + position, + rootPath, + tree, + uri, + word, +}: { + position: { line: number; character: number } + rootPath: string | null + tree: Parser.Tree + uri: string + word: string +}): LSP.Location | null { + // NOTE: when a word is a file path to a sourced file, we return a location to + // that file. + if (tree.rootNode) { + const node = tree.rootNode.descendantForPosition({ + row: position.line, + column: position.character, + }) + + if (!node || node.text.trim() !== word) { + throw new Error('Implementation error: word was not found at the given position') + } + + const isSourced = node.previousNamedSibling + ? SOURCING_COMMANDS.includes(node.previousNamedSibling.text.trim()) + : false + + const rootPaths = [path.dirname(uri), rootPath].filter(Boolean) as string[] + + const sourcedUri = isSourced ? getSourcedUri({ word, rootPaths }) : null + + if (sourcedUri) { + return LSP.Location.create(sourcedUri, LSP.Range.create(0, 0, 0, 0)) + } + } + + return null +} + +const stripQuotes = (path: string): string => { + const first = path[0] + const last = path[path.length - 1] + + if (first === last && [`"`, `'`].includes(first)) { + return path.slice(1, -1) + } + + return path +} + +/** + * Tries to parse the given path and returns a URI if possible. + * - Filters out dynamic sources + * - Converts a relative paths to absolute paths + * - Converts a tilde path to an absolute path + * - Resolves the path + * + * NOTE: for future improvements: + * "If filename does not contain a slash, file names in PATH are used to find + * the directory containing filename." 
(see https://ss64.com/osx/source.html) + */ +function getSourcedUri({ + rootPaths, + word, +}: { + rootPaths: string[] + word: string +}): string | null { + let unquotedPath = stripQuotes(word) + + if (unquotedPath.includes('$')) { + // NOTE: we don't support dynamic sourcing + return null + } + + if (unquotedPath.startsWith('~')) { + unquotedPath = untildify(unquotedPath) + } + + if (unquotedPath.startsWith('/')) { + if (fs.existsSync(unquotedPath)) { + return `file://${unquotedPath}` + } + return null + } + + // resolve relative path + for (const rootPath of rootPaths) { + const potentialPath = path.join(rootPath.replace('file://', ''), unquotedPath) + + // check if path is a file + if (fs.existsSync(potentialPath)) { + return `file://${potentialPath}` + } + } + + return null +} diff --git a/testing/fixtures.ts b/testing/fixtures.ts index f1ba38c35..628070087 100644 --- a/testing/fixtures.ts +++ b/testing/fixtures.ts @@ -19,6 +19,7 @@ export const FIXTURE_URI = { INSTALL: `file://${path.join(FIXTURE_FOLDER, 'install.sh')}`, ISSUE101: `file://${path.join(FIXTURE_FOLDER, 'issue101.sh')}`, ISSUE206: `file://${path.join(FIXTURE_FOLDER, 'issue206.sh')}`, + MISSING_EXTENSION: `file://${path.join(FIXTURE_FOLDER, 'extension')}`, MISSING_NODE: `file://${path.join(FIXTURE_FOLDER, 'missing-node.sh')}`, PARSE_PROBLEMS: `file://${path.join(FIXTURE_FOLDER, 'parse-problems.sh')}`, SOURCING: `file://${path.join(FIXTURE_FOLDER, 'sourcing.sh')}`, @@ -34,4 +35,4 @@ export const FIXTURE_DOCUMENT: Record<FIXTURE_KEY, TextDocument> = ( return acc }, {} as any) -export default FIXTURE_DOCUMENT +export const REPO_ROOT_FOLDER = path.resolve(path.join(FIXTURE_FOLDER, '../..')) diff --git a/testing/fixtures/extension b/testing/fixtures/extension index 81cf153fe..03312803f 100644 --- a/testing/fixtures/extension +++ b/testing/fixtures/extension @@ -1,3 +1,5 @@ #!/bin/sh +. sourcing.sh + echo "It works, but is not parsed initially" diff --git a/testing/fixtures/extension.inc b/testing/fixtures/extension.inc index 40c13ab44..73b151c1c 100644 --- a/testing/fixtures/extension.inc +++ b/testing/fixtures/extension.inc @@ -1,5 +1,7 @@ #!/bin/sh +source ./issue101.sh + RED=`tput setaf 1` GREEN=`tput setaf 2` BLUE=`tput setaf 4` diff --git a/testing/fixtures/sourcing.sh b/testing/fixtures/sourcing.sh index 4c090617f..981c0ff48 100644 --- a/testing/fixtures/sourcing.sh +++ b/testing/fixtures/sourcing.sh @@ -13,3 +13,7 @@ BOLD=`tput bold` # redefined echo $BOL echo "$" + +source ./scripts/tag-release.inc + +tagRelease '1.0.0' # FIXME: the "defined on" looks weird diff --git a/vscode-client/package.json b/vscode-client/package.json index c2b3bd7f3..6ff8d4664 100644 --- a/vscode-client/package.json +++ b/vscode-client/package.json @@ -52,6 +52,11 @@ "default": false, "description": "Controls if Treesitter parsing errors will be highlighted as problems." }, + "bashIde.includeAllWorkspaceSymbols": { + "type": "boolean", + "default": false, + "description": "Controls how symbols (e.g. variables and functions) are included and used for completion and documentation. If false (default and recommended), then we only include symbols from sourced files (i.e. using non-dynamic statements like 'source file.sh' or '. file.sh'). If true, then all symbols from the workspace are included." + }, "bashIde.shellcheckPath": { "type": "string", "default": "shellcheck",