Mirror of https://github.com/Myzel394/config-lsp.git (synced 2025-06-18 23:15:26 +02:00)

Compare commits — 101 commits
Commits (SHA1, newest first):
66c93938fc, e187040dd0, ac97ec77ef, a2decaeff3, d5ec3694db, 5c6ca95912, 6fe41b5040, 0dda74c8cb,
d10655996f, c578b4b448, 3c9ee9da53, 429c2cd4be, 9a1686a7d8, fa45351ec5, b520ada4ed, 9b306f339e,
e1140ae757, 9f22689cac, e4d7521a4c, c5fefad56d, ef625f9bf6, db4e1bae4c, 5de2711b03, 5e535741d2,
ce7264aded, e69edeaece, 15ce5958da, 3857bd5694, e2c2fac98c, 25218161b9, 1d7e746545, 7feb034a84,
7377d952c8, 15471894d4, 8350458ae5, d81f978771, 437985d72f, 00976cec95, f29bb12d84, 1cfb9bbfba,
0b2690910f, b94d987565, e14866bcdc, 1227949f26, d3f6122eef, ff9b5db18a, a0dca94b9d, 020cc8ad67,
eb076dbf53, 36950fe271, ba056d6ae9, bf05d07fc9, 98f76fd839, 019726e28a, 78ca195a9d, 706b8137dd,
dbf543db66, e569516aae, 0c827b04cd, 3ac3ebbe50, 026e0349a1, 67c7f7f4b7, 5d03b4598c, dd6bcc4301,
c6ca63477c, 091de78cb5, 50819894af, fe8a42d37b, 3131023ef2, cc82080cc1, c2f7de5f13, 757f24a521,
e1ec57f576, be93c874db, ea667653cd, f7a5556710, 4a3ab6e040, 558da3ba15, 66f8b36e74, 387c2f2b61,
d745b39f4b, a32900cef8, fd5749f594, 610ca6a3c3, 4bd84b9774, 1f6ecaff90, 132efb0954, fcdd70030f,
e225910678, 88b9a51e95, 9d48d52509, 7cdf25cc3b, f56951cb8d, 57ce6c1f4f, 87c68de419, 9378392927,
ec4deeb59f, 5798dd45f1, 750f3a97e1, aa5f9a6630, 4e183d543c
13  .deepsource.toml  (new file)

@@ -0,0 +1,13 @@
version = 1

[[analyzers]]
name = "go"

[analyzers.meta]
import_root = "github.com/Myzel394/config-lsp"

[[analyzers]]
name = "javascript"

[[analyzers]]
name = "shell"
18  .github/workflows/pr-tests.yaml  (vendored)

@@ -5,9 +5,6 @@ on: [pull_request]
jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        go-version: [ '1.22.x' ]

    steps:
      - uses: actions/checkout@v4
@@ -16,9 +13,24 @@ jobs:
        with:
          github_access_token: ${{ secrets.GITHUB_TOKEN }}

      - name: Check if project can be linted
        run: nix develop --command bash -c "just lint" && git diff --exit-code

      - name: Check if antlr parsers are up to date
        run: nix develop --command bash -c "just update-antlr-parsers" && git diff --exit-code

      - name: Check Nix flake
        run: nix flake check

      - name: Build app
        run: nix develop --command bash -c "cd server && go build"

      - name: Build VS Code extension
        run: nix build .#vs-code-extension

      - name: Upload VS Code extension
        uses: actions/upload-artifact@v4
        with:
          name: vs-code-extension
          path: result/config-lsp-*.vsix
27  .github/workflows/release.yaml  (vendored)

@@ -1,4 +1,4 @@
-name: Build nightly release
+name: build and release

permissions:
  contents: write
@@ -29,7 +29,7 @@ jobs:
      - name: Check version in code matches flake version
        shell: bash
        run: |
-         if ! [ $(grep '// CI:CD-VERSION$' server/root-handler/handler.go | cut -d'"' -f 2) = $(grep '# CI:CD-VERSION$' flake.nix | cut -d'"' -f 2) ];
+         if ! [ $(grep '// CI:CD-VERSION$' server/root-handler/common.go | cut -d'"' -f 2) = $(grep '# CI:CD-VERSION$' flake.nix | cut -d'"' -f 2) ];
          then
            echo "Version mismatch between code and flake"
            exit 1
@@ -59,7 +59,7 @@ jobs:
      GITHUB_TOKEN: ${{ secrets.GH_CONFIGLSP_TOKEN }}

  build-extension:
-   name: Build extension for ${{ matrix.target }}
+   name: Build extension for ${{ matrix.action_name }}
    runs-on: ubuntu-latest
    needs:
      # Wait for server to build so that we know the checks have passed
@@ -71,23 +71,29 @@
          - goos: linux
            goarch: amd64
            vscode_target: linux-x64
            action_name: Linux x64
          - goos: linux
            goarch: arm64
            vscode_target: linux-arm64
            action_name: Linux ARM

          - goos: darwin
            goarch: amd64
            vscode_target: darwin-x64
            action_name: macOS x64
          - goos: darwin
            goarch: arm64
            vscode_target: darwin-arm64
            action_name: macOS ARM

          - goos: windows
            goarch: amd64
            vscode_target: win32-x64
            action_name: Windows x64
          - goos: windows
            goarch: arm64
            vscode_target: win32-arm64
            action_name: Windows ARM

    steps:
      - name: Checkout
@@ -108,13 +114,21 @@
        run: nix build .#"vs-code-extension-bare"

      - name: Build extension
        if: ${{ matrix.goos != 'windows' }}
        run: cd server && GOOS=${{ matrix.goos }} GOARCH=${{ matrix.goarch }} go build -a -gcflags=all="-l -B" -ldflags="-s -w" -o config-lsp
      - name: Build extension
        if: ${{ matrix.goos == 'windows' }}
        run: cd server && GOOS=${{ matrix.goos }} GOARCH=${{ matrix.goarch }} go build -a -gcflags=all="-l -B" -ldflags="-s -w" -o config-lsp.exe

      - name: Prepare folder
        run: cp -rL result dist && chmod -R 777 dist

      - name: Move binary to extension
        if: ${{ matrix.goos != 'windows' }}
        run: mv server/config-lsp dist/out/
      - name: Move binary to extension
        if: ${{ matrix.goos == 'windows' }}
        run: mv server/config-lsp.exe dist/out/

      - name: Shrink binary
        if: ${{ matrix.goos == 'linux' }}
@@ -130,11 +144,6 @@
        with:
          files: '*.vsix'

      - name: Set .env
        run: |
          touch .env
          echo "VSCE_PAT=${{ secrets.VSCE_PAT }}" >> .env

      - name: Upload extension to VS Code Marketplace
-       run: nix develop .#"vs-code-extension" --command bash -c "source .env && vsce publish --packagePath *.vsix -p ${{ secrets.VSCE_PAT }}"
+       run: nix develop .#"vs-code-extension" --command bash -c "vsce publish --packagePath *.vsix -p ${{ secrets.VSCE_PAT }}"
|
@@ -19,7 +19,7 @@ builds:
    dir: ./server

archives:
- - format: tar.gz
+ - formats: [ 'tar.gz' ]
    # this name template makes the OS and Arch compatible with the results of `uname`.
    name_template: >-
      {{ .ProjectName }}_
@@ -31,7 +31,7 @@ archives:
    # use zip for windows archives
    format_overrides:
      - goos: windows
-       format: zip
+       formats: [ 'zip' ]

changelog:
  sort: asc
53  README.md

@@ -28,11 +28,52 @@ You are welcome to request any config file, as far as it's fairly well known.

## Installation

### VS Code Extension

[Install the extension from the marketplace](https://marketplace.visualstudio.com/items?itemName=myzel394.config-lsp)

Alternatively, you can also manually install the extension:

1. Download the latest extension version from the [release page](https://github.com/Myzel394/config-lsp/releases) - You can find the extension under the "assets" section. The filename ends with `.vsix`
2. Open VS Code
3. Open the extensions sidebar
4. In the top bar, click on the three dots and select "Install from VSIX..."
5. Select the just downloaded `.vsix` file
6. You may need to restart VS Code
7. Enjoy!

### Manual installation

To use `config-lsp` in any other editor, you'll need to install it manually.
Don't worry, it's easy!

#### Installing the latest Binary

##### Brew

```sh
brew install myzel394/formulae/config-lsp
```

##### Manual Binary

Download the latest binary from the [releases page](https://github.com/Myzel394/config-lsp/releases) and put it in your PATH.

Follow the instructions for your editor below.
##### Compiling

### Neovim installation
You can either compile the binary using go:

```sh
go build -o config-lsp
```

or build it using Nix:

```sh
nix flake build
```

#### Neovim installation

Using [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig) you can add `config-lsp` by adding the following to your `lsp.lua` (filename might differ):

@@ -57,14 +98,6 @@ end
lspconfig.config_lsp.setup {}
```

### VS Code installation

The VS Code extension is currently in development. An official extension will be released soon.

However, at the moment you can also compile the extension yourself and run it in development mode.

**Do not create an extension and publish it yourself. Contribute to the official extension instead.**

## Supporting config-lsp

You can either contribute to the project, [see CONTRIBUTING.md](CONTRIBUTING.md), or you can sponsor me via [GitHub Sponsors](https://github.com/sponsors/Myzel394) or via [crypto currencies](https://github.com/Myzel394/contact-me?tab=readme-ov-file#donations).
18  flake.lock  (generated)

@@ -26,11 +26,11 @@
    ]
  },
  "locked": {
-   "lastModified": 1728509152,
-   "narHash": "sha256-tQo1rg3TlwgyI8eHnLvZSlQx9d/o2Rb4oF16TfaTOw0=",
+   "lastModified": 1742209644,
+   "narHash": "sha256-jMy1XqXqD0/tJprEbUmKilTkvbDY/C0ZGSsJJH4TNCE=",
    "owner": "tweag",
    "repo": "gomod2nix",
-   "rev": "d5547e530464c562324f171006fc8f639aa01c9f",
+   "rev": "8f3534eb8f6c5c3fce799376dc3b91bae6b11884",
    "type": "github"
  },
  "original": {
@@ -41,11 +41,11 @@
  },
  "nixpkgs": {
    "locked": {
-     "lastModified": 1728888510,
-     "narHash": "sha256-nsNdSldaAyu6PE3YUA+YQLqUDJh+gRbBooMMekZJwvI=",
+     "lastModified": 1742669843,
+     "narHash": "sha256-G5n+FOXLXcRx+3hCJ6Rt6ZQyF1zqQ0DL0sWAMn2Nk0w=",
      "owner": "nixos",
      "repo": "nixpkgs",
-     "rev": "a3c0b3b21515f74fd2665903d4ce6bc4dc81c77c",
+     "rev": "1e5b653dff12029333a6546c11e108ede13052eb",
      "type": "github"
    },
    "original": {
@@ -97,11 +97,11 @@
    "systems": "systems_2"
  },
  "locked": {
-   "lastModified": 1726560853,
-   "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
+   "lastModified": 1731533236,
+   "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
    "owner": "numtide",
    "repo": "flake-utils",
-   "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
+   "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
    "type": "github"
  },
  "original": {
47  flake.nix

@@ -3,12 +3,12 @@
  inputs = {
    nixpkgs.url = "github:nixos/nixpkgs?ref=nixos-unstable";
-   utils.url = "github:numtide/flake-utils";
    gomod2nix = {
      url = "github:tweag/gomod2nix";
      inputs.nixpkgs.follows = "nixpkgs";
      inputs.utils.follows = "utils";
    };
+   utils.url = "github:numtide/flake-utils";
  };

  outputs = { self, nixpkgs, utils, gomod2nix }:
@@ -23,26 +23,27 @@
    "aarch64-windows"
  ] (system:
    let
-     version = "0.1.1"; # CI:CD-VERSION
+     version = "0.2.2"; # CI:CD-VERSION
      pkgs = import nixpkgs {
        inherit system;
        overlays = [
          (final: prev: {
-           go = prev.go_1_22;
-           buildGoModule = prev.buildGo122Module;
+           go = prev.go_1_24;
+           buildGoModule = prev.buildGo124Module;
          })
          gomod2nix.overlays.default
        ];
      };
      inputs = [
-       pkgs.go_1_22
+       pkgs.go_1_24
      ];
      serverUncompressed = pkgs.buildGoModule {
        nativeBuildInputs = inputs;
        pname = "github.com/Myzel394/config-lsp";
        version = version;
        src = ./server;
-       vendorHash = "sha256-eO1eY+2XuOCd/dKwgFtu05+bnn/Cv8ZbUIwRjCwJF+U=";
+       vendorHash = "sha256-0/oMmrdQGnx7opL4SYaYU2FdroKkF60FtRTvZ1dYr/Y";
        proxyVendor = true;
        ldflags = [ "-s" "-w" ];
        checkPhase = ''
          go test -v $(pwd)/...
@@ -68,6 +69,7 @@
    in {
      packages = {
        default = server;
+       "server-uncompressed" = serverUncompressed;
        "vs-code-extension-bare" = let
          name = "config-lsp";
          node-modules = pkgs.mkYarnPackage {
@@ -131,20 +133,27 @@
        };
      in node-modules;
    };
-   devShells.default = pkgs.mkShell {
-     buildInputs = inputs ++ (with pkgs; [
-       mailutils
-       wireguard-tools
-       antlr
-     ]) ++ (if pkgs.stdenv.isLinux then with pkgs; [
-       postfix
-     ] else []);
-   };
+
+   devShells.default = let
+     ourGopls = pkgs.gopls;
+   in
+     pkgs.mkShell {
+       buildInputs = inputs ++ (with pkgs; [
+         mailutils
+         wireguard-tools
+         antlr
+         just
+         ourGopls
+       ]) ++ (if pkgs.stdenv.isLinux then with pkgs; [
+         postfix
+       ] else []);
+     };

    devShells."vs-code-extension" = pkgs.mkShell {
-     buildInputs = [
-       pkgs.nodejs
-       pkgs.vsce
-       pkgs.yarn2nix
+     buildInputs = with pkgs; [
+       nodejs
+       vsce
+       yarn2nix
      ];
    };
  }
50  justfile  (new file)

@@ -0,0 +1,50 @@
#!/usr/bin/env just --justfile

set dotenv-load := true

default:
    @just --list

# Lint whole project
lint:
    cd server && gofmt -s -w .
    # cd vs-code-extension && yarn run lint

# Build config-lsp and test it in nvim (config-lsp will be loaded automatically)
[working-directory: "./server"]
test-nvim file:
    go build -o ./result/bin/config-lsp && rm -rf ~/.local/state/nvim/lsp.log && DOTFILES_IGNORE_CONFIG_LSP=1 nvim {{file}} -c ':source nvim-lsp-debug.lua'

# Show Mason Logs
show-nvim-logs:
    bat ~/.local/state/nvim/lsp.log

[working-directory: "./server"]
test:
    nix develop --command bash -c 'go test ./... -count=1'

[working-directory: "./server"]
update-antlr-parsers:
    # aliases
    cd handlers/aliases && antlr4 -Dlanguage=Go -o ast/parser Aliases.g4

    # fstab
    cd handlers/fstab && antlr4 -Dlanguage=Go -o ast/parser Fstab.g4

    # sshd_config
    cd handlers/sshd_config && antlr4 -Dlanguage=Go -o ast/parser Config.g4
    cd handlers/sshd_config/match-parser && antlr4 -Dlanguage=Go -o parser Match.g4

    # ssh_config
    cd handlers/ssh_config && antlr4 -Dlanguage=Go -o ast/parser Config.g4
    cd handlers/ssh_config/match-parser && antlr4 -Dlanguage=Go -o parser Match.g4

    # hosts
    cd handlers/hosts && antlr4 -Dlanguage=Go -o ast/parser Hosts.g4


# Ready for a PR? Run this recipe before opening the PR!
ready:
    just lint
    just test
75  server/common-documentation/mnt-apfs.go  (new file)
@ -0,0 +1,75 @@
|
||||
package commondocumentation
|
||||
|
||||
import (
|
||||
docvalues "config-lsp/doc-values"
|
||||
)
|
||||
|
||||
var APFSDocumentationAssignable = map[docvalues.EnumString]docvalues.DeprecatedValue{
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"user",
|
||||
"Set the owner of the files in the file system to user. The default owner is the owner of the directory on which the file system is being mounted. The user may be a user-name, or a numeric value.",
|
||||
): docvalues.UIDValue{},
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"group",
|
||||
"Set the group of the files in the file system to group. The default group is the group of the directory on which the file system is being mounted. The group may be a group-name, or a numeric value.",
|
||||
): docvalues.GIDValue{},
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"snapshot",
|
||||
"The name of the snapshot to mount. In this usage pathname is the mounted root directory of the base volume containing the snapshot.",
|
||||
): docvalues.StringValue{},
|
||||
}
|
||||
|
||||
var APFSDocumentationEnums = []docvalues.EnumString{
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"async",
|
||||
"All I/O to the file system should be done asynchronously. This can be somewhat dangerous with respect to losing data when faced with system crashes and power outages. This is also the default. It can be avoided with the noasync option.",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"noauto",
|
||||
"This filesystem should be skipped when mount is run with the -a flag.",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"nodev",
|
||||
"Do not interpret character or block special devices on the file system. This option is useful for a server that has file systems containing special devices for architectures other than its own.",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"noexec",
|
||||
"Do not allow execution of any binaries on the mounted file system. This option is useful for a server that has file systems containing binaries for architectures other than its own.",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"noowners",
|
||||
"Ignore the ownership field for the entire volume. This causes all objects to appear as owned by user ID 99 and group ID 99. User ID 99 is interpreted as the current effective user ID, while group ID 99 is used directly and translates to ``unknown''.",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"nosuid",
|
||||
"Do not allow set-user-identifier or set-group-identifier bits to take effect.",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"rdonly",
|
||||
"The same as -r; mount the file system read-only (even the super-user may not write it).",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"update",
|
||||
"The same as -u; indicate that the status of an already mounted file system should be changed.",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"union",
|
||||
"Causes the namespace to appear as the union of directories of the mounted filesystem with corresponding directories in the underlying filesystem. Lookups will be done in the mounted filesystem first. If those operations fail due to a non-existent file the underlying directory is then accessed.",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"noatime",
|
||||
"Do not update the file access time when reading from a file. This option is useful on file systems where there are large numbers of files and performance is more critical than updating the file access time (which is rarely ever important).",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"strictatime",
|
||||
"Always update the file access time when reading from a file. Without this option the filesystem may default to a less strict update mode, where some access time updates are skipped for performance reasons. This option could be ignored if it is not supported by the filesystem.",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"nobrowse",
|
||||
"This option indicates that the mount point should not be visible via the GUI (i.e., appear on the Desktop as a separate volume).",
|
||||
),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"nofollow",
|
||||
"This option indicates that in the course of the mount system call, the kernel should not follow any symlinks that may be present in the provided mount-on directory. This is the same as the -k option.",
|
||||
),
|
||||
}
|
195  server/common-documentation/mnt-bcachefs.go  (new file)
@ -0,0 +1,195 @@
|
||||
package commondocumentation
|
||||
|
||||
import docvalues "config-lsp/doc-values"
|
||||
|
||||
var checksumType = docvalues.EnumValue{
|
||||
EnforceValues: true,
|
||||
Values: []docvalues.EnumString{
|
||||
docvalues.CreateEnumString("none"),
|
||||
docvalues.CreateEnumString("crc32c"),
|
||||
docvalues.CreateEnumString("crc64"),
|
||||
},
|
||||
}
|
||||
|
||||
var compressionType = docvalues.EnumValue{
|
||||
EnforceValues: true,
|
||||
Values: []docvalues.EnumString{
|
||||
docvalues.CreateEnumStringWithDoc("none", "(default)"),
|
||||
docvalues.CreateEnumString("lz4"),
|
||||
docvalues.CreateEnumString("gzip"),
|
||||
docvalues.CreateEnumString("zstd"),
|
||||
},
|
||||
}
|
||||
|
||||
// No idea if those enums are correct,
|
||||
// the documentation does not provide any information
|
||||
var booleanEnumValue = docvalues.EnumValue{
|
||||
EnforceValues: true,
|
||||
Values: []docvalues.EnumString{
|
||||
docvalues.CreateEnumString("yes"),
|
||||
docvalues.CreateEnumString("no"),
|
||||
},
|
||||
}
|
||||
|
||||
var BcacheFSDocumentationAssignable = map[docvalues.EnumString]docvalues.DeprecatedValue{
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"errors",
|
||||
"Action to take on filesystem error. The errors option is used for inconsistencies that indicate some sort of a bug",
|
||||
): docvalues.EnumValue{
|
||||
EnforceValues: true,
|
||||
Values: []docvalues.EnumString{
|
||||
docvalues.CreateEnumStringWithDoc("continue", "Log the error but continue normal operation"),
|
||||
docvalues.CreateEnumStringWithDoc("ro", "Emergency read only, immediately halting any changes to the filesystem on disk"),
|
||||
docvalues.CreateEnumStringWithDoc("panic", "Immediately halt the entire machine, printing a backtrace on the system console"),
|
||||
},
|
||||
},
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"metadata_replicas",
|
||||
"Number of replicas for metadata (journal and btree)",
|
||||
): docvalues.PositiveNumberValue(),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"data_replicas",
|
||||
"Number of replicas for user data",
|
||||
): docvalues.PositiveNumberValue(),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"metadata_checksum",
|
||||
"Checksum type for metadata writes",
|
||||
): checksumType,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"data_checksum",
|
||||
"Checksum type for data writes",
|
||||
): checksumType,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"compression",
|
||||
"Compression type",
|
||||
): compressionType,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"background_compression",
|
||||
"Background compression type",
|
||||
): compressionType,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"str_hash",
|
||||
"Hash function for string hash tables (directories and xattrs)",
|
||||
): docvalues.EnumValue{
|
||||
EnforceValues: true,
|
||||
Values: []docvalues.EnumString{
|
||||
docvalues.CreateEnumString("crc32c"),
|
||||
docvalues.CreateEnumString("crc64"),
|
||||
docvalues.CreateEnumString("siphash"),
|
||||
},
|
||||
},
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"metadata_target",
|
||||
"Preferred target for metadata writes",
|
||||
): docvalues.StringValue{},
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"foreground_target",
|
||||
"Preferred target for foreground writes",
|
||||
): docvalues.StringValue{},
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"background_target",
|
||||
"Target for data to be moved to in the background",
|
||||
): docvalues.StringValue{},
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"promote_target",
|
||||
"Target for data to be copied to on read",
|
||||
): docvalues.StringValue{},
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"erasure_code",
|
||||
"Enable erasure coding",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"inodes_32bit",
|
||||
"Restrict new inode numbers to 32 bits",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"shard_inode_numbers",
|
||||
"Use CPU id for high bits of new inode numbers.",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"wide_macs",
|
||||
"Store full 128 bit cryptographic MACs (default 80)",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"inline_data",
|
||||
"Enable inline data extents (default on)",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"journal_flush_delay",
|
||||
"Delay in milliseconds before automatic journal commit (default 1000)",
|
||||
): docvalues.PositiveNumberValue(),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"journal_flush_disabled",
|
||||
"Disables journal flush on sync/fsync. `journal_flush_delay` remains in effect, thus with the default setting not more than 1 second of work will be lost",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"journal_reclaim",
|
||||
"Reclaim journal space after a certain amount of time",
|
||||
): docvalues.PositiveNumberValue(),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"journal_reclaim_delay",
|
||||
"Delay in milliseconds before automatic journal reclaim",
|
||||
): docvalues.PositiveNumberValue(),
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"acl",
|
||||
"Enable POSIX ACLs",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"usrquota",
|
||||
"Enable user quotas",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"grpquota",
|
||||
"Enable group quotas",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"prjquota",
|
||||
"Enable project quotas",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"degraded",
|
||||
"Allow mounting with data degraded",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"very_degraded",
|
||||
"Allow mounting with data missing",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"verbose",
|
||||
"Extra debugging info during mount/recovery",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"fsck",
|
||||
"Run fsck during mount",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"fix_errors",
|
||||
"Fix errors without asking during fsck",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"ratelimit_errors",
|
||||
"Ratelimit error messages during fsck",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"read_only",
|
||||
"Mount in read only mode",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"nochanges",
|
||||
"Issue no writes, even for journal replay",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"norecovery",
|
||||
"Don’t replay the journal (not recommended)",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"noexcl",
|
||||
"Don’t open devices in exclusive mode",
|
||||
): booleanEnumValue,
|
||||
docvalues.CreateEnumStringWithDoc(
|
||||
"version_upgrade",
|
||||
"Upgrade on disk format to latest version",
|
||||
): booleanEnumValue,
|
||||
}
|
||||
|
||||
var BcacheFSDocumentationEnums = []docvalues.EnumString{}
|
22  server/common-documentation/mnt-vboxsf.go  (new file)

@@ -0,0 +1,22 @@
package commondocumentation

import docvalues "config-lsp/doc-values"

var VboxsfDocumentationAssignable = docvalues.MergeKeyEnumAssignmentMaps(FatDocumentationAssignable, map[docvalues.EnumString]docvalues.DeprecatedValue{
    docvalues.CreateEnumStringWithDoc(
        "iocharset",
        "This option sets the character set used for I/O operations. Note that on Linux guests, if the iocharset option is not specified, then the Guest Additions driver will attempt to use the character set specified by the CONFIG_NLS_DEFAULT kernel option. If this option is not set either, then UTF-8 is used.",
    ): docvalues.EnumValue{
        EnforceValues: true,
        Values:        AvailableCharsets,
    },
    docvalues.CreateEnumStringWithDoc(
        "convertcp",
        "This option specifies the character set used for the shared folder name. This is UTF-8 by default.",
    ): docvalues.EnumValue{
        EnforceValues: true,
        Values:        AvailableCharsets,
    },
})

var VboxsfDocumentationEnums = []docvalues.EnumString{}
@@ -6,7 +6,10 @@ import (
)

func ClearDiagnostics(context *glsp.Context, uri protocol.DocumentUri) {
-   go context.Notify(
+   // Diagnostics are sent synchronously, as sending them async
+   // could result in a race condition when we send diagnostics
+   // to the client.
+   context.Notify(
        protocol.ServerTextDocumentPublishDiagnostics,
        protocol.PublishDiagnosticsParams{
            URI: uri,
43  server/common/levenshtein.go  (new file)

@@ -0,0 +1,43 @@
package common

import (
    "github.com/hbollon/go-edlib"
)

// Find items that are similar to the given input.
// This is used to find typos & suggest the correct item.
// Once an item is found that has a Damerau-Levenshtein distance of 1, it is immediately returned.
// If not, then the next 2 items of similarity 2, or 3 items of similarity 3 are returned.
// If no items with similarity <= 3 are found, then an empty slice is returned.
func FindSimilarItems[T ~string](
    input T,
    items []T,
) []T {
    itemsPerSimilarity := map[uint8][]T{
        2: make([]T, 0, 2),
        3: make([]T, 0, 3),
    }

    for _, item := range items {
        similarity := edlib.DamerauLevenshteinDistance(string(item), string(input))

        switch similarity {
        case 1:
            return []T{item}
        case 2:
            itemsPerSimilarity[2] = append(itemsPerSimilarity[2], item)

            if len(itemsPerSimilarity[2]) >= 2 {
                return itemsPerSimilarity[2]
            }
        case 3:
            itemsPerSimilarity[3] = append(itemsPerSimilarity[3], item)

            if len(itemsPerSimilarity[3]) >= 3 {
                return itemsPerSimilarity[3]
            }
        }
    }

    return append(itemsPerSimilarity[2], itemsPerSimilarity[3]...)
}
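As a rough illustration of how `FindSimilarItems` behaves, here is a minimal sketch. The keyword list is invented for the example, and the snippet assumes it is compiled inside the `config-lsp` server module:

```go
package main

import (
    "config-lsp/common"
    "fmt"
)

func main() {
    // Hypothetical keywords a handler might know about.
    keywords := []string{"PermitRootLogin", "PasswordAuthentication", "Port"}

    // "PermitRootLogn" is one edit away from "PermitRootLogin",
    // so that single best match is returned immediately.
    suggestions := common.FindSimilarItems("PermitRootLogn", keywords)
    fmt.Println(suggestions) // [PermitRootLogin]
}
```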
5  server/common/log.go  (new file)

@@ -0,0 +1,5 @@
package common

import "github.com/tliron/commonlog"

var Log = commonlog.GetLogger("config-lsp")
42  server/common/options.go  (new file)

@@ -0,0 +1,42 @@
package common

import (
    "os"
    "slices"
)

// Global options for the server
type ServerOptionsType struct {
    // If true, the server will not return any errors if the
    // language was undetectable.
    // This is used for example in the VS Code extension, where
    // we show a native warning. The error message boxes just clutter
    // the interface.
    NoUndetectableErrors bool

    // If true, the server will not detect typos and suggest
    // the correct keywords.
    // Since the server finds typos using the Damerau-Levenshtein distance,
    // and this is done each time code actions are requested
    // (which happens quite often), these suggestions can eat a lot of resources.
    // You may want to enable this option if you are dealing with little
    // resources or if you're low on battery.
    NoTypoSuggestions bool
}

var ServerOptions = new(ServerOptionsType)

func InitServerOptions() {
    ServerOptions.NoUndetectableErrors = false
    ServerOptions.NoTypoSuggestions = false

    if slices.Contains(os.Args, "--no-undetectable-errors") {
        Log.Info("config-lsp will not return errors for undetectable files")
        ServerOptions.NoUndetectableErrors = true
    }

    if slices.Contains(os.Args, "--no-typo-suggestions") {
        Log.Info("config-lsp will not detect typos for keywords")
        ServerOptions.NoTypoSuggestions = true
    }
}
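A hypothetical consumer of these options could gate the typo suggestions like this; the `suggestKeywords` helper and its package are illustrative only, not part of the repository:

```go
package handlers // hypothetical consumer package

import "config-lsp/common"

// suggestKeywords only computes typo suggestions when the
// --no-typo-suggestions flag was not passed at startup.
func suggestKeywords(input string, known []string) []string {
    if common.ServerOptions.NoTypoSuggestions {
        return nil
    }

    return common.FindSimilarItems(input, known)
}
```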
@@ -177,7 +177,7 @@ func (v KeyEnumAssignmentValue) DeprecatedFetchCompletions(line string, cursor u
    )

    if found {
-       relativePosition := max(1, foundPosition) - 1
+       relativePosition := min(foundPosition, len(line)-1)
        selectedKey := line[:uint32(relativePosition)]
        line = line[uint32(relativePosition+len(v.Separator)):]
        cursor -= uint32(relativePosition)
@@ -2,31 +2,21 @@ package docvalues

import (
    "config-lsp/utils"
-   protocol "github.com/tliron/glsp/protocol_3_16"
+   "errors"
    "strings"
+
+   protocol "github.com/tliron/glsp/protocol_3_16"
)

-type PathDoesNotExistError struct{}
-
-func (e PathDoesNotExistError) Error() string {
-   return "This path does not exist"
-}
-
-type PathInvalidError struct{}
-
-func (e PathInvalidError) Error() string {
-   return "This path is invalid"
-}
-
type PathType uint8

const (
    PathTypeExistenceOptional PathType = 0
-   PathTypeFile PathType = 1
-   PathTypeDirectory PathType = 2
+   PathTypeFile              PathType = 1
+   PathTypeDirectory         PathType = 2
)

type PathValue struct {
+   IsOptional   bool
    RequiredType PathType
}
@@ -34,47 +24,88 @@ func (v PathValue) GetTypeDescription() []string {
hints := make([]string, 0)

switch v.RequiredType {
case PathTypeExistenceOptional:
hints = append(hints, "Optional")
break
case PathTypeFile:
hints = append(hints, "File")
case PathTypeDirectory:
hints = append(hints, "Directory")
}

if v.IsOptional {
hints = append(hints, "Optional")
}

return []string{strings.Join(hints, ", ")}
}

func (v PathValue) DeprecatedCheckIsValid(value string) []*InvalidValue {
if !utils.DoesPathExist(value) {
return []*InvalidValue{{
Err: PathDoesNotExistError{},
Start: 0,
End: uint32(len(value)),
}}
if v.IsOptional {
return nil
} else {
return []*InvalidValue{{
Err: errors.New("This path does not exist"),
Start: 0,
End: uint32(len(value)),
}}
}
}

isValid := false
fileExpected := (v.RequiredType & PathTypeFile) == PathTypeFile
directoryExpected := (v.RequiredType & PathTypeDirectory) == PathTypeDirectory

if (v.RequiredType & PathTypeFile) == PathTypeFile {
isValid := true

// If file is expected
if fileExpected {
// and exists
isValid = isValid && utils.IsPathFile(value)
// file not expected
} else {
// and should not exist
isValid = isValid && !utils.IsPathFile(value)
}

if (v.RequiredType & PathTypeDirectory) == PathTypeDirectory {
// if directory
if directoryExpected {
// and exists
isValid = isValid && utils.IsPathDirectory(value)
// directory not expected
} else {
// and should not exist
isValid = isValid && !utils.IsPathDirectory(value)
}

if isValid {
return nil
}

if fileExpected && directoryExpected {
return []*InvalidValue{{
Err: errors.New("This must be either a file or a directory"),
Start: 0,
End: uint32(len(value)),
}}
}
if fileExpected {
return []*InvalidValue{{
Err: errors.New("This must be a file"),
Start: 0,
End: uint32(len(value)),
}}
}
if directoryExpected {
return []*InvalidValue{{
Err: errors.New("This must be a directory"),
Start: 0,
End: uint32(len(value)),
}}
}

return []*InvalidValue{{
Err: PathInvalidError{},
Err: errors.New("This path is invalid"),
Start: 0,
End: uint32(len(value)),
},
}
}}
}

func (v PathValue) DeprecatedFetchCompletions(line string, cursor uint32) []protocol.CompletionItem {
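A small sketch of how the reworked bit-mask `RequiredType` can be driven; the example path and the `main` package are assumptions for illustration only:

```go
package main

import (
    docvalues "config-lsp/doc-values"
    "fmt"
)

func main() {
    // RequiredType is treated as a bit mask, so a value may be required
    // to be a file, a directory, or allowed to be either one.
    eitherOne := docvalues.PathValue{
        IsOptional:   false,
        RequiredType: docvalues.PathTypeFile | docvalues.PathTypeDirectory,
    }

    // Assuming /etc/fstab exists as a regular file on this machine,
    // no invalid values are reported and 0 is printed.
    fmt.Println(len(eitherOne.DeprecatedCheckIsValid("/etc/fstab")))
}
```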
27  server/fetch_tags.js  (new file)

@@ -0,0 +1,27 @@
// Creates a JSON object in the form of:
// {
//     [<Option name>]: documentation
// }
//
// Searches for <dl> elements with <dt> and <dd> children based
// on the currently selected element in the Elements tab of the

(() => {
    const content = {}
    let currentOption = ""

    const $elements = $0.querySelectorAll(":scope > dt,dd")

    for (const $element of $elements) {
        switch ($element.tagName) {
            case "DT":
                currentOption = $element.textContent.trim()
                break
            case "DD":
                content[currentOption] = $element.textContent.trim()
                break
        }
    }

    console.log(content)
})()
@@ -1,35 +1,32 @@
module config-lsp

-go 1.22.5
+go 1.24

require (
    github.com/antlr4-go/antlr/v4 v4.13.1
    github.com/emirpasic/gods v1.18.1
-   github.com/google/go-cmp v0.6.0
-   github.com/k0kubun/pp v3.0.1+incompatible
-   github.com/tliron/commonlog v0.2.17
+   github.com/google/go-cmp v0.7.0
+   github.com/hbollon/go-edlib v1.6.0
+   github.com/tliron/commonlog v0.2.19
    github.com/tliron/glsp v0.2.2
-   golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56
+   golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
)

require (
    github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
    github.com/gorilla/websocket v1.5.3 // indirect
    github.com/iancoleman/strcase v0.3.0 // indirect
    github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88 // indirect
    github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
    github.com/mattn/go-colorable v0.1.13 // indirect
    github.com/mattn/go-isatty v0.0.20 // indirect
    github.com/mattn/go-runewidth v0.0.16 // indirect
-   github.com/muesli/termenv v0.15.2 // indirect
-   github.com/petermattis/goid v0.0.0-20240716203034-badd1c0974d6 // indirect
+   github.com/muesli/termenv v0.16.0 // indirect
+   github.com/petermattis/goid v0.0.0-20250508124226-395b08cebbdb // indirect
    github.com/pkg/errors v0.9.1 // indirect
    github.com/rivo/uniseg v0.4.7 // indirect
-   github.com/sasha-s/go-deadlock v0.3.1 // indirect
+   github.com/sasha-s/go-deadlock v0.3.5 // indirect
    github.com/segmentio/ksuid v1.0.4 // indirect
-   github.com/sourcegraph/jsonrpc2 v0.2.0 // indirect
-   github.com/tliron/kutil v0.3.24 // indirect
-   golang.org/x/crypto v0.25.0 // indirect
-   golang.org/x/sys v0.22.0 // indirect
-   golang.org/x/term v0.22.0 // indirect
+   github.com/sourcegraph/jsonrpc2 v0.2.1 // indirect
+   github.com/tliron/kutil v0.3.26 // indirect
+   golang.org/x/crypto v0.38.0 // indirect
+   golang.org/x/sys v0.33.0 // indirect
+   golang.org/x/term v0.32.0 // indirect
)
||||
|
@ -4,55 +4,46 @@ github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiE
|
||||
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
|
||||
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
|
||||
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
|
||||
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||
github.com/hbollon/go-edlib v1.6.0 h1:ga7AwwVIvP8mHm9GsPueC0d71cfRU/52hmPJ7Tprv4E=
|
||||
github.com/hbollon/go-edlib v1.6.0/go.mod h1:wnt6o6EIVEzUfgbUZY7BerzQ2uvzp354qmS2xaLkrhM=
|
||||
github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=
|
||||
github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
|
||||
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88 h1:uC1QfSlInpQF+M0ao65imhwqKnz3Q2z/d8PWZRMQvDM=
|
||||
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
|
||||
github.com/k0kubun/pp v3.0.1+incompatible h1:3tqvf7QgUnZ5tXO6pNAZlrvHgl6DvifjDrd9g2S9Z40=
|
||||
github.com/k0kubun/pp v3.0.1+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
|
||||
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
|
||||
github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
|
||||
github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o=
|
||||
github.com/petermattis/goid v0.0.0-20240716203034-badd1c0974d6 h1:DUDJI8T/9NcGbbL+AWk6vIYlmQ8ZBS8LZqVre6zbkPQ=
|
||||
github.com/petermattis/goid v0.0.0-20240716203034-badd1c0974d6/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
|
||||
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
|
||||
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
|
||||
github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
|
||||
github.com/petermattis/goid v0.0.0-20250508124226-395b08cebbdb h1:3PrKuO92dUTMrQ9dx0YNejC6U/Si6jqKmyQ9vWjwqR4=
|
||||
github.com/petermattis/goid v0.0.0-20250508124226-395b08cebbdb/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/sasha-s/go-deadlock v0.3.1 h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0=
|
||||
github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM=
|
||||
github.com/sasha-s/go-deadlock v0.3.5 h1:tNCOEEDG6tBqrNDOX35j/7hL5FcFViG6awUGROb2NsU=
|
||||
github.com/sasha-s/go-deadlock v0.3.5/go.mod h1:bugP6EGbdGYObIlx7pUZtWqlvo8k9H6vCBBsiChJQ5U=
|
||||
github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c=
|
||||
github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE=
|
||||
github.com/sourcegraph/jsonrpc2 v0.2.0 h1:KjN/dC4fP6aN9030MZCJs9WQbTOjWHhrtKVpzzSrr/U=
|
||||
github.com/sourcegraph/jsonrpc2 v0.2.0/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
|
||||
github.com/tliron/commonlog v0.2.17 h1:GFVvzDZbNLkuQfT45IZeWkrR5AyqiX7Du8pWAtFuPTY=
|
||||
github.com/tliron/commonlog v0.2.17/go.mod h1:J2Hb63/mMjYmkDzd7E+VL9wCHT6NFNSzV/IOjJWMJqc=
|
||||
github.com/sourcegraph/jsonrpc2 v0.2.1 h1:2GtljixMQYUYCmIg7W9aF2dFmniq/mOr2T9tFRh6zSQ=
|
||||
github.com/sourcegraph/jsonrpc2 v0.2.1/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
|
||||
github.com/tliron/commonlog v0.2.19 h1:v1mOH1TyzFLqkshR03khw7ENAZPjAyZTQBQrqN+vX9c=
|
||||
github.com/tliron/commonlog v0.2.19/go.mod h1:AcdhfcUqlAWukDrzTGyaPhUgYiNdZhS4dKzD/e0tjcY=
|
||||
github.com/tliron/glsp v0.2.2 h1:IKPfwpE8Lu8yB6Dayta+IyRMAbTVunudeauEgjXBt+c=
|
||||
github.com/tliron/glsp v0.2.2/go.mod h1:GMVWDNeODxHzmDPvYbYTCs7yHVaEATfYtXiYJ9w1nBg=
|
||||
github.com/tliron/kutil v0.3.24 h1:LvaqizF4htpEef9tC0B//sqtvQzEjDu69A4a1HrY+ko=
|
||||
github.com/tliron/kutil v0.3.24/go.mod h1:2iSIhOnOe1reqczZQy6TauVHhItsq6xRLV2rVBvodpk=
|
||||
golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
|
||||
golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
|
||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
|
||||
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
|
||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
github.com/tliron/kutil v0.3.26 h1:G+dicQLvzm3zdOMrrQFLBfHJXtk57fEu2kf1IFNyJxw=
|
||||
github.com/tliron/kutil v0.3.26/go.mod h1:1/HRVAb+fnRIRnzmhu0FPP+ZJKobrpwHStDVMuaXDzY=
|
||||
golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
|
||||
golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
|
||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI=
|
||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
|
||||
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk=
|
||||
golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
|
||||
golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
|
||||
|
@@ -68,7 +68,7 @@ func (s *aliasesParserListener) EnterValues(ctx *parser.ValuesContext) {
    }
}

-// === Value === //
+// === Name === //

func (s *aliasesParserListener) EnterUser(ctx *parser.UserContext) {
    location := common.CharacterRangeFromCtx(ctx.BaseParserRuleContext)
@@ -1,4 +1,4 @@
-// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Aliases
@@ -1,4 +1,4 @@
-// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser
@@ -1,4 +1,4 @@
-// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Aliases
@@ -1,4 +1,4 @@
-// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Aliases
@@ -15,6 +15,7 @@ var UserDeclaration = "`user`"
var PathField = docvalues.DocumentationValue{
    Documentation: "Append messages to file, specified by its absolute pathname",
    Value: docvalues.PathValue{
+       IsOptional:   true,
        RequiredType: docvalues.PathTypeFile,
    },
}
@@ -40,6 +41,7 @@ var EmailDeclaration = "`user-part@domain-part`"
var IncludeField = docvalues.DocumentationValue{
    Documentation: "Include any definitions in file as alias entries. The format of the file is identical to this one.",
    Value: docvalues.PathValue{
+       IsOptional:   false,
        RequiredType: docvalues.PathTypeFile,
    },
}
|
@@ -51,7 +51,7 @@ func TextDocumentHover(
    contents := []string{}
    contents = append(contents, handlers.GetAliasValueTypeInfo(value)...)
    contents = append(contents, "")
-   contents = append(contents, "#### Value")
+   contents = append(contents, "#### Name")
    contents = append(contents, handlers.GetAliasValueHoverInfo(*document.Indexes, value))

    text := strings.Join(contents, "\n")
@@ -14,17 +14,23 @@ type analyzerContext struct {
func Analyze(
    document *shared.FstabDocument,
) []protocol.Diagnostic {
-   ctx := analyzerContext{
+   ctx := &analyzerContext{
        document: document,
    }

-   analyzeFieldAreFilled(&ctx)
+   analyzeFieldAreFilled(ctx)

    if len(ctx.diagnostics) > 0 {
        return ctx.diagnostics
    }

-   analyzeValuesAreValid(&ctx)
+   analyzeValuesAreValid(ctx)

    if len(ctx.diagnostics) > 0 {
        return ctx.diagnostics
    }

+   analyzeFSCKField(ctx)
+
    return ctx.diagnostics
}
49  server/handlers/fstab/analyzer/fsck.go  (new file)

@@ -0,0 +1,49 @@
package analyzer

import (
    "config-lsp/common"
    "config-lsp/handlers/fstab/ast"
    "config-lsp/handlers/fstab/fields"
    "fmt"
    "strings"

    protocol "github.com/tliron/glsp/protocol_3_16"
)

func analyzeFSCKField(ctx *analyzerContext) {
    it := ctx.document.Config.Entries.Iterator()

    var rootEntry *ast.FstabEntry

    for it.Next() {
        entry := it.Value().(*ast.FstabEntry)

        if entry.Fields != nil && entry.Fields.Fsck != nil && entry.Fields.Fsck.Value.Value == "1" {
            fileSystem := strings.ToLower(entry.Fields.FilesystemType.Value.Value)

            if _, found := fields.FsckOneDisabledFilesystems[fileSystem]; found {
                // From https://wiki.archlinux.org/title/Fstab

                ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
                    Range:    entry.Fields.Fsck.ToLSPRange(),
                    Message:  "If the root file system is btrfs or XFS, the fsck order should be set to 0 instead of 1. See fsck.btrfs(8) and fsck.xfs(8).",
                    Severity: &common.SeverityWarning,
                })

                continue
            }

            if entry.Fields.Fsck.Value.Value == "1" {
                if rootEntry != nil {
                    ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
                        Range:    entry.Fields.Fsck.ToLSPRange(),
                        Message:  fmt.Sprintf("Only the root file system should have a fsck of 1. Other file systems should have a fsck of 2 or 0. The root file system is already using a fsck=1 on line %d", rootEntry.Fields.Start.Line),
                        Severity: &common.SeverityWarning,
                    })
                } else {
                    rootEntry = entry
                }
            }
        }
    }
}
44  server/handlers/fstab/analyzer/fsck_test.go  (new file)

@@ -0,0 +1,44 @@
package analyzer

import (
    testutils_test "config-lsp/handlers/fstab/test_utils"
    "testing"
)

func TestFSCKMultipleRoots(
    t *testing.T,
) {
    document := testutils_test.DocumentFromInput(t, `
UUID=12345678-1234-1234-1234-123456789012 /boot ext4 defaults 0 1
UUID=12345678-1234-1234-1234-123456789012 / btrfs defaults 0 1
UUID=12345678-1234-1234-1234-123456789012 /home ext4 defaults 0 2
`)

    ctx := &analyzerContext{
        document: document,
    }

    analyzeFSCKField(ctx)

    if len(ctx.diagnostics) != 1 {
        t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))
    }
}

func TestFSCKBtrfsUsingRoot(
    t *testing.T,
) {
    document := testutils_test.DocumentFromInput(t, `
UUID=12345678-1234-1234-1234-123456789012 /boot btrfs defaults 0 1
`)

    ctx := &analyzerContext{
        document: document,
    }

    analyzeFSCKField(ctx)

    if len(ctx.diagnostics) != 1 {
        t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))
    }
}
38  server/handlers/fstab/analyzer/spec.go  (new file)

@@ -0,0 +1,38 @@
package analyzer

import (
    "config-lsp/common"
    "config-lsp/handlers/fstab/ast"
    "regexp"

    protocol "github.com/tliron/glsp/protocol_3_16"
)

var volatileBlockFields = regexp.MustCompile(`^/dev/(sd|nvme|mmcblk|sr|vd|loop|cdrom)[a-zA-Z0-9]*$`)

func analyzeSpecField(
    ctx *analyzerContext,
    field *ast.FstabField,
) {
    if field == nil {
        return
    }

    if field.Value.Value == "" {
        return
    }

    if !volatileBlockFields.MatchString(field.Value.Value) {
        return
    }

    codeDescription := protocol.CodeDescription{
        HRef: protocol.URI("https://wiki.archlinux.org/title/Persistent_block_device_naming"),
    }
    ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
        Range:           field.ToLSPRange(),
        Message:         "Kernel name descriptors for block devices are not persistent and can change each boot, they should not be used in configuration files. Prefer device UUIDs or LABELs instead.",
        CodeDescription: &codeDescription,
        Severity:        &common.SeverityWarning,
    })
}
@@ -21,6 +21,8 @@ func analyzeValuesAreValid(
    checkField(ctx, entry.Fields.MountPoint, fields.MountPointField)
    checkField(ctx, entry.Fields.FilesystemType, fields.FileSystemTypeField)

+   analyzeSpecField(ctx, entry.Fields.Spec)
+
    if entry.Fields.Options != nil {
        mountOptions := entry.FetchMountOptionsField(true)

@@ -33,8 +35,8 @@ func analyzeValuesAreValid(
        checkField(ctx, entry.Fields.Freq, fields.FreqField)
    }

-   if entry.Fields.Pass != nil {
-       checkField(ctx, entry.Fields.Pass, fields.PassField)
+   if entry.Fields.Fsck != nil {
+       checkField(ctx, entry.Fields.Fsck, fields.FsckField)
    }
}
@@ -14,7 +14,7 @@ const (
    FstabFieldFileSystemType FstabFieldName = "filesystemtype"
    FstabFieldOptions        FstabFieldName = "options"
    FstabFieldFreq           FstabFieldName = "freq"
-   FstabFieldPass           FstabFieldName = "pass"
+   FstabFieldFsck           FstabFieldName = "fsck"
)

type FstabField struct {
@@ -29,7 +29,7 @@ type FstabFields struct {
    FilesystemType *FstabField
    Options        *FstabField
    Freq           *FstabField
-   Pass           *FstabField
+   Fsck           *FstabField
}

type FstabEntry struct {
|
@ -20,7 +20,7 @@ import (
|
||||
// LABEL=test ext4 defaults 0 0
|
||||
func (e FstabEntry) GetFieldAtPosition(position common.Position) FstabFieldName {
|
||||
// No fields defined, empty line
|
||||
if e.Fields.Spec == nil && e.Fields.MountPoint == nil && e.Fields.FilesystemType == nil && e.Fields.Options == nil && e.Fields.Freq == nil && e.Fields.Pass == nil {
|
||||
if e.Fields.Spec == nil && e.Fields.MountPoint == nil && e.Fields.FilesystemType == nil && e.Fields.Options == nil && e.Fields.Freq == nil && e.Fields.Fsck == nil {
|
||||
return FstabFieldSpec
|
||||
}
|
||||
|
||||
@ -41,8 +41,8 @@ func (e FstabEntry) GetFieldAtPosition(position common.Position) FstabFieldName
|
||||
if e.Fields.Freq != nil && e.Fields.Freq.ContainsPosition(position) {
|
||||
return FstabFieldFreq
|
||||
}
|
||||
if e.Fields.Pass != nil && e.Fields.Pass.ContainsPosition(position) {
|
||||
return FstabFieldPass
|
||||
if e.Fields.Fsck != nil && e.Fields.Fsck.ContainsPosition(position) {
|
||||
return FstabFieldFsck
|
||||
}
|
||||
|
||||
// Okay let's try to fetch the field by assuming the user is typing from left to right normally
|
||||
@ -63,8 +63,8 @@ func (e FstabEntry) GetFieldAtPosition(position common.Position) FstabFieldName
|
||||
return FstabFieldFreq
|
||||
}
|
||||
|
||||
if e.Fields.Freq != nil && e.Fields.Freq.IsPositionAfterEnd(position) && (e.Fields.Pass == nil || e.Fields.Pass.IsPositionBeforeEnd(position)) {
|
||||
return FstabFieldPass
|
||||
if e.Fields.Freq != nil && e.Fields.Freq.IsPositionAfterEnd(position) && (e.Fields.Fsck == nil || e.Fields.Fsck.IsPositionBeforeEnd(position)) {
|
||||
return FstabFieldFsck
|
||||
}
|
||||
|
||||
// Okay shit no idea, let's just give whatever is missing
|
||||
@ -89,7 +89,7 @@ func (e FstabEntry) GetFieldAtPosition(position common.Position) FstabFieldName
|
||||
return FstabFieldFreq
|
||||
}
|
||||
|
||||
return FstabFieldPass
|
||||
return FstabFieldFsck
|
||||
}
|
||||
|
||||
// LABEL=test /mnt/test btrfs subvol=backup,fat=32 [0] [0]
|
||||
@ -122,7 +122,7 @@ func (e FstabEntry) getDefinedFieldsAmount() uint8 {
|
||||
if e.Fields.Freq != nil {
|
||||
definedAmount++
|
||||
}
|
||||
if e.Fields.Pass != nil {
|
||||
if e.Fields.Fsck != nil {
|
||||
definedAmount++
|
||||
}
|
||||
|
||||
@ -141,8 +141,8 @@ func (e FstabEntry) FetchMountOptionsField(includeDefaults bool) docvalues.Depre
|
||||
return nil
|
||||
}
|
||||
|
||||
var enums []docvalues.EnumString
|
||||
var assignable map[docvalues.EnumString]docvalues.DeprecatedValue
|
||||
var enums []docvalues.EnumString = make([]docvalues.EnumString, 0)
|
||||
var assignable map[docvalues.EnumString]docvalues.DeprecatedValue = make(map[docvalues.EnumString]docvalues.DeprecatedValue, 0)
|
||||
|
||||
if includeDefaults {
|
||||
enums = append(option.Enums, fields.DefaultOptions...)
|
||||
|
@ -128,7 +128,7 @@ func (s *fstabParserListener) EnterPass(ctx *parser.PassContext) {
|
||||
text := ctx.GetText()
|
||||
value := commonparser.ParseRawString(text, commonparser.FullFeatures)
|
||||
|
||||
s.fstabContext.currentEntry.Fields.Pass = &FstabField{
|
||||
s.fstabContext.currentEntry.Fields.Fsck = &FstabField{
|
||||
LocationRange: location,
|
||||
Value: value,
|
||||
}
|
||||
|
@ -161,7 +161,7 @@ func (c *FstabConfig) parseStatement(
|
||||
// FilesystemType: filesystemType,
|
||||
// Options: options,
|
||||
// Freq: freq,
|
||||
// Pass: pass,
|
||||
// Fsck: pass,
|
||||
// },
|
||||
// }
|
||||
//
|
||||
|
@ -5,10 +5,6 @@ null
|
||||
'#'
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
|
||||
token symbolic names:
|
||||
null
|
||||
@ -17,10 +13,6 @@ WHITESPACE
|
||||
HASH
|
||||
STRING
|
||||
QUOTED_STRING
|
||||
ADFS
|
||||
AFFS
|
||||
BTRFS
|
||||
EXFAT
|
||||
|
||||
rule names:
|
||||
entry
|
||||
@ -33,4 +25,4 @@ pass
|
||||
|
||||
|
||||
atn:
|
||||
[4, 1, 9, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1, 0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3, 0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40, 8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0, 1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12, 0, 2, 1, 0, 4, 5, 1, 0, 4, 9, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0, 4, 57, 1, 0, 0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0, 0, 0, 12, 65, 1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16, 1, 0, 0, 0, 16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0, 18, 19, 1, 0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24, 23, 1, 0, 0, 0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0, 0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31, 3, 6, 3, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0, 32, 34, 5, 2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1, 0, 0, 0, 35, 37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 39, 1, 0, 0, 0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0, 0, 40, 42, 1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51, 50, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0, 1, 54, 1, 1, 0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7, 0, 0, 0, 58, 5, 1, 0, 0, 0, 59, 60, 7, 1, 0, 0, 60, 7, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0, 0, 65, 66, 5, 1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51]
|
||||
[4, 1, 5, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1, 0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3, 0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40, 8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0, 1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12, 0, 1, 1, 0, 4, 5, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0, 4, 57, 1, 0, 0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0, 0, 0, 12, 65, 1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16, 1, 0, 0, 0, 16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0, 18, 19, 1, 0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24, 23, 1, 0, 0, 0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0, 0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31, 3, 6, 3, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0, 32, 34, 5, 2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1, 0, 0, 0, 35, 37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 39, 1, 0, 0, 0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0, 0, 40, 42, 1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51, 50, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0, 1, 54, 1, 1, 0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7, 0, 0, 0, 58, 5, 1, 0, 0, 0, 59, 60, 7, 0, 0, 0, 60, 7, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0, 0, 65, 66, 5, 1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51]
|
@ -3,8 +3,4 @@ WHITESPACE=2
|
||||
HASH=3
|
||||
STRING=4
|
||||
QUOTED_STRING=5
|
||||
ADFS=6
|
||||
AFFS=7
|
||||
BTRFS=8
|
||||
EXFAT=9
|
||||
'#'=3
|
||||
|
@ -5,10 +5,6 @@ null
|
||||
'#'
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
null
|
||||
|
||||
token symbolic names:
|
||||
null
|
||||
@ -17,10 +13,6 @@ WHITESPACE
|
||||
HASH
|
||||
STRING
|
||||
QUOTED_STRING
|
||||
ADFS
|
||||
AFFS
|
||||
BTRFS
|
||||
EXFAT
|
||||
|
||||
rule names:
|
||||
DIGITS
|
||||
@ -28,10 +20,6 @@ WHITESPACE
|
||||
HASH
|
||||
STRING
|
||||
QUOTED_STRING
|
||||
ADFS
|
||||
AFFS
|
||||
BTRFS
|
||||
EXFAT
|
||||
|
||||
channel names:
|
||||
DEFAULT_TOKEN_CHANNEL
|
||||
@ -41,4 +29,4 @@ mode names:
|
||||
DEFAULT_MODE
|
||||
|
||||
atn:
|
||||
[4, 0, 9, 76, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 1, 0, 4, 0, 21, 8, 0, 11, 0, 12, 0, 22, 1, 1, 4, 1, 26, 8, 1, 11, 1, 12, 1, 27, 1, 2, 1, 2, 1, 3, 4, 3, 33, 8, 3, 11, 3, 12, 3, 34, 1, 4, 1, 4, 3, 4, 39, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 44, 8, 4, 10, 4, 12, 4, 47, 9, 4, 1, 4, 3, 4, 50, 8, 4, 1, 4, 3, 4, 53, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 0, 0, 9, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 1, 0, 12, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9, 9, 32, 32, 35, 35, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 70, 70, 102, 102, 2, 0, 83, 83, 115, 115, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 69, 69, 101, 101, 2, 0, 88, 88, 120, 120, 82, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 1, 20, 1, 0, 0, 0, 3, 25, 1, 0, 0, 0, 5, 29, 1, 0, 0, 0, 7, 32, 1, 0, 0, 0, 9, 36, 1, 0, 0, 0, 11, 54, 1, 0, 0, 0, 13, 59, 1, 0, 0, 0, 15, 64, 1, 0, 0, 0, 17, 70, 1, 0, 0, 0, 19, 21, 7, 0, 0, 0, 20, 19, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 20, 1, 0, 0, 0, 22, 23, 1, 0, 0, 0, 23, 2, 1, 0, 0, 0, 24, 26, 7, 1, 0, 0, 25, 24, 1, 0, 0, 0, 26, 27, 1, 0, 0, 0, 27, 25, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 4, 1, 0, 0, 0, 29, 30, 5, 35, 0, 0, 30, 6, 1, 0, 0, 0, 31, 33, 8, 2, 0, 0, 32, 31, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 32, 1, 0, 0, 0, 34, 35, 1, 0, 0, 0, 35, 8, 1, 0, 0, 0, 36, 38, 5, 34, 0, 0, 37, 39, 3, 3, 1, 0, 38, 37, 1, 0, 0, 0, 38, 39, 1, 0, 0, 0, 39, 45, 1, 0, 0, 0, 40, 41, 3, 7, 3, 0, 41, 42, 3, 3, 1, 0, 42, 44, 1, 0, 0, 0, 43, 40, 1, 0, 0, 0, 44, 47, 1, 0, 0, 0, 45, 43, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45, 1, 0, 0, 0, 48, 50, 3, 7, 3, 0, 49, 48, 1, 0, 0, 0, 49, 50, 1, 0, 0, 0, 50, 52, 1, 0, 0, 0, 51, 53, 5, 34, 0, 0, 52, 51, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 10, 1, 0, 0, 0, 54, 55, 7, 3, 0, 0, 55, 56, 7, 4, 0, 0, 56, 57, 7, 5, 0, 0, 57, 58, 7, 6, 0, 0, 58, 12, 1, 0, 0, 0, 59, 60, 7, 3, 0, 0, 60, 61, 7, 5, 0, 0, 61, 62, 7, 5, 0, 0, 62, 63, 7, 6, 0, 0, 63, 14, 1, 0, 0, 0, 64, 65, 7, 7, 0, 0, 65, 66, 7, 8, 0, 0, 66, 67, 7, 9, 0, 0, 67, 68, 7, 5, 0, 0, 68, 69, 7, 6, 0, 0, 69, 16, 1, 0, 0, 0, 70, 71, 7, 10, 0, 0, 71, 72, 7, 11, 0, 0, 72, 73, 7, 5, 0, 0, 73, 74, 7, 3, 0, 0, 74, 75, 7, 8, 0, 0, 75, 18, 1, 0, 0, 0, 8, 0, 22, 27, 34, 38, 45, 49, 52, 0]
|
||||
[4, 0, 5, 46, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 1, 0, 4, 0, 13, 8, 0, 11, 0, 12, 0, 14, 1, 1, 4, 1, 18, 8, 1, 11, 1, 12, 1, 19, 1, 2, 1, 2, 1, 3, 4, 3, 25, 8, 3, 11, 3, 12, 3, 26, 1, 4, 1, 4, 3, 4, 31, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 36, 8, 4, 10, 4, 12, 4, 39, 9, 4, 1, 4, 3, 4, 42, 8, 4, 1, 4, 3, 4, 45, 8, 4, 0, 0, 5, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 1, 0, 3, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9, 9, 32, 32, 35, 35, 52, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 1, 12, 1, 0, 0, 0, 3, 17, 1, 0, 0, 0, 5, 21, 1, 0, 0, 0, 7, 24, 1, 0, 0, 0, 9, 28, 1, 0, 0, 0, 11, 13, 7, 0, 0, 0, 12, 11, 1, 0, 0, 0, 13, 14, 1, 0, 0, 0, 14, 12, 1, 0, 0, 0, 14, 15, 1, 0, 0, 0, 15, 2, 1, 0, 0, 0, 16, 18, 7, 1, 0, 0, 17, 16, 1, 0, 0, 0, 18, 19, 1, 0, 0, 0, 19, 17, 1, 0, 0, 0, 19, 20, 1, 0, 0, 0, 20, 4, 1, 0, 0, 0, 21, 22, 5, 35, 0, 0, 22, 6, 1, 0, 0, 0, 23, 25, 8, 2, 0, 0, 24, 23, 1, 0, 0, 0, 25, 26, 1, 0, 0, 0, 26, 24, 1, 0, 0, 0, 26, 27, 1, 0, 0, 0, 27, 8, 1, 0, 0, 0, 28, 30, 5, 34, 0, 0, 29, 31, 3, 3, 1, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 37, 1, 0, 0, 0, 32, 33, 3, 7, 3, 0, 33, 34, 3, 3, 1, 0, 34, 36, 1, 0, 0, 0, 35, 32, 1, 0, 0, 0, 36, 39, 1, 0, 0, 0, 37, 35, 1, 0, 0, 0, 37, 38, 1, 0, 0, 0, 38, 41, 1, 0, 0, 0, 39, 37, 1, 0, 0, 0, 40, 42, 3, 7, 3, 0, 41, 40, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 44, 1, 0, 0, 0, 43, 45, 5, 34, 0, 0, 44, 43, 1, 0, 0, 0, 44, 45, 1, 0, 0, 0, 45, 10, 1, 0, 0, 0, 8, 0, 14, 19, 26, 30, 37, 41, 44, 0]
|
@ -3,8 +3,4 @@ WHITESPACE=2
|
||||
HASH=3
|
||||
STRING=4
|
||||
QUOTED_STRING=5
|
||||
ADFS=6
|
||||
AFFS=7
|
||||
BTRFS=8
|
||||
EXFAT=9
|
||||
'#'=3
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser // Fstab
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser
|
||||
|
||||
@ -46,51 +46,34 @@ func fstablexerLexerInit() {
|
||||
"", "", "", "'#'",
|
||||
}
|
||||
staticData.SymbolicNames = []string{
|
||||
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING", "ADFS",
|
||||
"AFFS", "BTRFS", "EXFAT",
|
||||
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING",
|
||||
}
|
||||
staticData.RuleNames = []string{
|
||||
"DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING", "ADFS", "AFFS",
|
||||
"BTRFS", "EXFAT",
|
||||
"DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING",
|
||||
}
|
||||
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||
staticData.serializedATN = []int32{
|
||||
4, 0, 9, 76, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 1, 0, 4, 0, 21,
|
||||
8, 0, 11, 0, 12, 0, 22, 1, 1, 4, 1, 26, 8, 1, 11, 1, 12, 1, 27, 1, 2, 1,
|
||||
2, 1, 3, 4, 3, 33, 8, 3, 11, 3, 12, 3, 34, 1, 4, 1, 4, 3, 4, 39, 8, 4,
|
||||
1, 4, 1, 4, 1, 4, 5, 4, 44, 8, 4, 10, 4, 12, 4, 47, 9, 4, 1, 4, 3, 4, 50,
|
||||
8, 4, 1, 4, 3, 4, 53, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1,
|
||||
6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1,
|
||||
8, 1, 8, 1, 8, 0, 0, 9, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15,
|
||||
8, 17, 9, 1, 0, 12, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9, 9, 32, 32,
|
||||
35, 35, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 70, 70, 102,
|
||||
102, 2, 0, 83, 83, 115, 115, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116,
|
||||
2, 0, 82, 82, 114, 114, 2, 0, 69, 69, 101, 101, 2, 0, 88, 88, 120, 120,
|
||||
82, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0,
|
||||
0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0,
|
||||
0, 0, 0, 17, 1, 0, 0, 0, 1, 20, 1, 0, 0, 0, 3, 25, 1, 0, 0, 0, 5, 29, 1,
|
||||
0, 0, 0, 7, 32, 1, 0, 0, 0, 9, 36, 1, 0, 0, 0, 11, 54, 1, 0, 0, 0, 13,
|
||||
59, 1, 0, 0, 0, 15, 64, 1, 0, 0, 0, 17, 70, 1, 0, 0, 0, 19, 21, 7, 0, 0,
|
||||
0, 20, 19, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 20, 1, 0, 0, 0, 22, 23,
|
||||
1, 0, 0, 0, 23, 2, 1, 0, 0, 0, 24, 26, 7, 1, 0, 0, 25, 24, 1, 0, 0, 0,
|
||||
26, 27, 1, 0, 0, 0, 27, 25, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 4, 1, 0,
|
||||
0, 0, 29, 30, 5, 35, 0, 0, 30, 6, 1, 0, 0, 0, 31, 33, 8, 2, 0, 0, 32, 31,
|
||||
1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 32, 1, 0, 0, 0, 34, 35, 1, 0, 0, 0,
|
||||
35, 8, 1, 0, 0, 0, 36, 38, 5, 34, 0, 0, 37, 39, 3, 3, 1, 0, 38, 37, 1,
|
||||
0, 0, 0, 38, 39, 1, 0, 0, 0, 39, 45, 1, 0, 0, 0, 40, 41, 3, 7, 3, 0, 41,
|
||||
42, 3, 3, 1, 0, 42, 44, 1, 0, 0, 0, 43, 40, 1, 0, 0, 0, 44, 47, 1, 0, 0,
|
||||
0, 45, 43, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45,
|
||||
1, 0, 0, 0, 48, 50, 3, 7, 3, 0, 49, 48, 1, 0, 0, 0, 49, 50, 1, 0, 0, 0,
|
||||
50, 52, 1, 0, 0, 0, 51, 53, 5, 34, 0, 0, 52, 51, 1, 0, 0, 0, 52, 53, 1,
|
||||
0, 0, 0, 53, 10, 1, 0, 0, 0, 54, 55, 7, 3, 0, 0, 55, 56, 7, 4, 0, 0, 56,
|
||||
57, 7, 5, 0, 0, 57, 58, 7, 6, 0, 0, 58, 12, 1, 0, 0, 0, 59, 60, 7, 3, 0,
|
||||
0, 60, 61, 7, 5, 0, 0, 61, 62, 7, 5, 0, 0, 62, 63, 7, 6, 0, 0, 63, 14,
|
||||
1, 0, 0, 0, 64, 65, 7, 7, 0, 0, 65, 66, 7, 8, 0, 0, 66, 67, 7, 9, 0, 0,
|
||||
67, 68, 7, 5, 0, 0, 68, 69, 7, 6, 0, 0, 69, 16, 1, 0, 0, 0, 70, 71, 7,
|
||||
10, 0, 0, 71, 72, 7, 11, 0, 0, 72, 73, 7, 5, 0, 0, 73, 74, 7, 3, 0, 0,
|
||||
74, 75, 7, 8, 0, 0, 75, 18, 1, 0, 0, 0, 8, 0, 22, 27, 34, 38, 45, 49, 52,
|
||||
0,
|
||||
4, 0, 5, 46, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||
4, 7, 4, 1, 0, 4, 0, 13, 8, 0, 11, 0, 12, 0, 14, 1, 1, 4, 1, 18, 8, 1,
|
||||
11, 1, 12, 1, 19, 1, 2, 1, 2, 1, 3, 4, 3, 25, 8, 3, 11, 3, 12, 3, 26, 1,
|
||||
4, 1, 4, 3, 4, 31, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 36, 8, 4, 10, 4, 12, 4,
|
||||
39, 9, 4, 1, 4, 3, 4, 42, 8, 4, 1, 4, 3, 4, 45, 8, 4, 0, 0, 5, 1, 1, 3,
|
||||
2, 5, 3, 7, 4, 9, 5, 1, 0, 3, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9,
|
||||
9, 32, 32, 35, 35, 52, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0,
|
||||
0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 1, 12, 1, 0, 0, 0, 3, 17, 1,
|
||||
0, 0, 0, 5, 21, 1, 0, 0, 0, 7, 24, 1, 0, 0, 0, 9, 28, 1, 0, 0, 0, 11, 13,
|
||||
7, 0, 0, 0, 12, 11, 1, 0, 0, 0, 13, 14, 1, 0, 0, 0, 14, 12, 1, 0, 0, 0,
|
||||
14, 15, 1, 0, 0, 0, 15, 2, 1, 0, 0, 0, 16, 18, 7, 1, 0, 0, 17, 16, 1, 0,
|
||||
0, 0, 18, 19, 1, 0, 0, 0, 19, 17, 1, 0, 0, 0, 19, 20, 1, 0, 0, 0, 20, 4,
|
||||
1, 0, 0, 0, 21, 22, 5, 35, 0, 0, 22, 6, 1, 0, 0, 0, 23, 25, 8, 2, 0, 0,
|
||||
24, 23, 1, 0, 0, 0, 25, 26, 1, 0, 0, 0, 26, 24, 1, 0, 0, 0, 26, 27, 1,
|
||||
0, 0, 0, 27, 8, 1, 0, 0, 0, 28, 30, 5, 34, 0, 0, 29, 31, 3, 3, 1, 0, 30,
|
||||
29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 37, 1, 0, 0, 0, 32, 33, 3, 7, 3,
|
||||
0, 33, 34, 3, 3, 1, 0, 34, 36, 1, 0, 0, 0, 35, 32, 1, 0, 0, 0, 36, 39,
|
||||
1, 0, 0, 0, 37, 35, 1, 0, 0, 0, 37, 38, 1, 0, 0, 0, 38, 41, 1, 0, 0, 0,
|
||||
39, 37, 1, 0, 0, 0, 40, 42, 3, 7, 3, 0, 41, 40, 1, 0, 0, 0, 41, 42, 1,
|
||||
0, 0, 0, 42, 44, 1, 0, 0, 0, 43, 45, 5, 34, 0, 0, 44, 43, 1, 0, 0, 0, 44,
|
||||
45, 1, 0, 0, 0, 45, 10, 1, 0, 0, 0, 8, 0, 14, 19, 26, 30, 37, 41, 44, 0,
|
||||
}
|
||||
deserializer := antlr.NewATNDeserializer(nil)
|
||||
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
||||
@ -136,8 +119,4 @@ const (
|
||||
FstabLexerHASH = 3
|
||||
FstabLexerSTRING = 4
|
||||
FstabLexerQUOTED_STRING = 5
|
||||
FstabLexerADFS = 6
|
||||
FstabLexerAFFS = 7
|
||||
FstabLexerBTRFS = 8
|
||||
FstabLexerEXFAT = 9
|
||||
)
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser // Fstab
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser // Fstab
|
||||
|
||||
@ -36,8 +36,7 @@ func fstabParserInit() {
|
||||
"", "", "", "'#'",
|
||||
}
|
||||
staticData.SymbolicNames = []string{
|
||||
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING", "ADFS",
|
||||
"AFFS", "BTRFS", "EXFAT",
|
||||
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING",
|
||||
}
|
||||
staticData.RuleNames = []string{
|
||||
"entry", "spec", "mountPoint", "fileSystem", "mountOptions", "freq",
|
||||
@ -45,35 +44,35 @@ func fstabParserInit() {
|
||||
}
|
||||
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||
staticData.serializedATN = []int32{
|
||||
4, 1, 9, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4,
|
||||
4, 1, 5, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4,
|
||||
2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1,
|
||||
0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3,
|
||||
0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40,
|
||||
8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0,
|
||||
1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1,
|
||||
4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12,
|
||||
0, 2, 1, 0, 4, 5, 1, 0, 4, 9, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0,
|
||||
4, 57, 1, 0, 0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0,
|
||||
0, 0, 12, 65, 1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16,
|
||||
1, 0, 0, 0, 16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0,
|
||||
18, 19, 1, 0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1,
|
||||
0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24,
|
||||
23, 1, 0, 0, 0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0,
|
||||
0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31,
|
||||
3, 6, 3, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0,
|
||||
32, 34, 5, 2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1,
|
||||
0, 0, 0, 35, 37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37,
|
||||
39, 1, 0, 0, 0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0,
|
||||
0, 40, 42, 1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43,
|
||||
1, 0, 0, 0, 43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0,
|
||||
45, 46, 1, 0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1,
|
||||
0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51,
|
||||
50, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0,
|
||||
1, 54, 1, 1, 0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7,
|
||||
0, 0, 0, 58, 5, 1, 0, 0, 0, 59, 60, 7, 1, 0, 0, 60, 7, 1, 0, 0, 0, 61,
|
||||
62, 7, 0, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0,
|
||||
0, 65, 66, 5, 1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30,
|
||||
33, 36, 39, 42, 45, 48, 51,
|
||||
0, 1, 1, 0, 4, 5, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0, 4, 57, 1, 0,
|
||||
0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0, 0, 0, 12, 65,
|
||||
1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16, 1, 0, 0, 0,
|
||||
16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0, 18, 19, 1,
|
||||
0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1, 0, 0, 0, 21,
|
||||
22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24, 23, 1, 0, 0,
|
||||
0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0, 0, 27, 26,
|
||||
1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31, 3, 6, 3, 0,
|
||||
30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0, 32, 34, 5,
|
||||
2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1, 0, 0, 0, 35,
|
||||
37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 39, 1, 0, 0,
|
||||
0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0, 0, 40, 42,
|
||||
1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0,
|
||||
43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1,
|
||||
0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1, 0, 0, 0, 48,
|
||||
49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51, 50, 1, 0, 0,
|
||||
0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0, 1, 54, 1, 1,
|
||||
0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7, 0, 0, 0, 58,
|
||||
5, 1, 0, 0, 0, 59, 60, 7, 0, 0, 0, 60, 7, 1, 0, 0, 0, 61, 62, 7, 0, 0,
|
||||
0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0, 0, 65, 66, 5,
|
||||
1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42,
|
||||
45, 48, 51,
|
||||
}
|
||||
deserializer := antlr.NewATNDeserializer(nil)
|
||||
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
||||
@ -117,10 +116,6 @@ const (
|
||||
FstabParserHASH = 3
|
||||
FstabParserSTRING = 4
|
||||
FstabParserQUOTED_STRING = 5
|
||||
FstabParserADFS = 6
|
||||
FstabParserAFFS = 7
|
||||
FstabParserBTRFS = 8
|
||||
FstabParserEXFAT = 9
|
||||
)
|
||||
|
||||
// FstabParser rules.
|
||||
@ -754,10 +749,6 @@ type IFileSystemContext interface {
|
||||
GetParser() antlr.Parser
|
||||
|
||||
// Getter signatures
|
||||
ADFS() antlr.TerminalNode
|
||||
AFFS() antlr.TerminalNode
|
||||
BTRFS() antlr.TerminalNode
|
||||
EXFAT() antlr.TerminalNode
|
||||
STRING() antlr.TerminalNode
|
||||
QUOTED_STRING() antlr.TerminalNode
|
||||
|
||||
@ -797,22 +788,6 @@ func NewFileSystemContext(parser antlr.Parser, parent antlr.ParserRuleContext, i
|
||||
|
||||
func (s *FileSystemContext) GetParser() antlr.Parser { return s.parser }
|
||||
|
||||
func (s *FileSystemContext) ADFS() antlr.TerminalNode {
|
||||
return s.GetToken(FstabParserADFS, 0)
|
||||
}
|
||||
|
||||
func (s *FileSystemContext) AFFS() antlr.TerminalNode {
|
||||
return s.GetToken(FstabParserAFFS, 0)
|
||||
}
|
||||
|
||||
func (s *FileSystemContext) BTRFS() antlr.TerminalNode {
|
||||
return s.GetToken(FstabParserBTRFS, 0)
|
||||
}
|
||||
|
||||
func (s *FileSystemContext) EXFAT() antlr.TerminalNode {
|
||||
return s.GetToken(FstabParserEXFAT, 0)
|
||||
}
|
||||
|
||||
func (s *FileSystemContext) STRING() antlr.TerminalNode {
|
||||
return s.GetToken(FstabParserSTRING, 0)
|
||||
}
|
||||
@ -851,7 +826,7 @@ func (p *FstabParser) FileSystem() (localctx IFileSystemContext) {
|
||||
p.SetState(59)
|
||||
_la = p.GetTokenStream().LA(1)
|
||||
|
||||
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&1008) != 0) {
|
||||
if !(_la == FstabParserSTRING || _la == FstabParserQUOTED_STRING) {
|
||||
p.GetErrorHandler().RecoverInline(p)
|
||||
} else {
|
||||
p.GetErrorHandler().ReportMatch(p)
|
||||
|
@ -27,7 +27,7 @@ LABEL=example /mnt/example fat32 defaults 0 2
|
||||
|
||||
rawFirstEntry, _ := c.Entries.Get(uint32(0))
|
||||
firstEntry := rawFirstEntry.(*FstabEntry)
|
||||
if !(firstEntry.Fields.Spec.Value.Value == "LABEL=test" && firstEntry.Fields.MountPoint.Value.Value == "/mnt/test" && firstEntry.Fields.FilesystemType.Value.Value == "ext4" && firstEntry.Fields.Options.Value.Value == "defaults" && firstEntry.Fields.Freq.Value.Value == "0" && firstEntry.Fields.Pass.Value.Value == "0") {
|
||||
if !(firstEntry.Fields.Spec.Value.Value == "LABEL=test" && firstEntry.Fields.MountPoint.Value.Value == "/mnt/test" && firstEntry.Fields.FilesystemType.Value.Value == "ext4" && firstEntry.Fields.Options.Value.Value == "defaults" && firstEntry.Fields.Freq.Value.Value == "0" && firstEntry.Fields.Fsck.Value.Value == "0") {
|
||||
t.Fatalf("Expected entry to be LABEL=test /mnt/test ext4 defaults 0 0, got %v", firstEntry)
|
||||
}
|
||||
|
||||
@ -71,8 +71,8 @@ LABEL=example /mnt/example fat32 defaults 0 2
|
||||
t.Errorf("Expected freq end to be 0:36, got %v", firstEntry.Fields.Freq.LocationRange.End)
|
||||
}
|
||||
|
||||
if !(firstEntry.Fields.Pass.LocationRange.Start.Line == 0 && firstEntry.Fields.Pass.LocationRange.Start.Character == 37) {
|
||||
t.Errorf("Expected pass start to be 0:37, got %v", firstEntry.Fields.Pass.LocationRange.Start)
|
||||
if !(firstEntry.Fields.Fsck.LocationRange.Start.Line == 0 && firstEntry.Fields.Fsck.LocationRange.Start.Character == 37) {
|
||||
t.Errorf("Expected pass start to be 0:37, got %v", firstEntry.Fields.Fsck.LocationRange.Start)
|
||||
}
|
||||
|
||||
field := firstEntry.GetFieldAtPosition(common.IndexPosition(0))
|
||||
|
40
server/handlers/fstab/fields/fsck.go
Normal file
@@ -0,0 +1,40 @@
package fields

import docvalues "config-lsp/doc-values"

var FsckField = docvalues.EnumValue{
	EnforceValues: false,
	Values: []docvalues.EnumString{
		docvalues.CreateEnumStringWithDoc(
			"0",
			"Defaults to zero (don’t check the filesystem) if not present.",
		),
		docvalues.CreateEnumStringWithDoc(
			"1",
			"The root filesystem should be specified with a fs_passno of 1.",
		),
		docvalues.CreateEnumStringWithDoc(
			"2",
			"Other filesystems [than the root filesystem] should have a fs_passno of 2.",
		),
	},
}

var FsckFieldWhenDisabledFilesystems = docvalues.EnumValue{
	EnforceValues: false,
	Values: []docvalues.EnumString{
		docvalues.CreateEnumStringWithDoc(
			"0",
			"Defaults to zero (don’t check the filesystem) if not present.",
		),
		docvalues.CreateEnumStringWithDoc(
			"2",
			"Other filesystems [than the root filesystem] should have a fs_passno of 2.",
		),
	},
}

var FsckOneDisabledFilesystems = map[string]struct{}{
	"btrfs": {},
	"xfs":   {},
}
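Aside (not part of this commit): a sketch of how the two enums above are meant to be selected, mirroring the completion handler later in this diff; the helper name pickFsckField and the strings import are assumptions made for the example.

// pickFsckField is a hypothetical helper; the real selection happens in the
// fstab completion handler shown further down in this diff.
func pickFsckField(filesystemType string) docvalues.EnumValue {
	// btrfs and xfs run their own integrity checks, so a check order of "1" is not offered for them
	if _, disabled := FsckOneDisabledFilesystems[strings.ToLower(filesystemType)]; disabled {
		return FsckFieldWhenDisabledFilesystems
	}

	return FsckField
}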
@ -6,6 +6,31 @@ import (
|
||||
"strings"
|
||||
)
|
||||
|
||||
func createMountOptionField(
|
||||
options []docvalues.EnumString,
|
||||
assignOption map[docvalues.EnumString]docvalues.DeprecatedValue,
|
||||
) docvalues.DeprecatedValue {
|
||||
// dynamicOptions := docvalues.MergeKeyEnumAssignmentMaps(defaultAssignOptions, assignOption)
|
||||
|
||||
return docvalues.ArrayValue{
|
||||
Separator: ",",
|
||||
DuplicatesExtractor: &MountOptionsExtractor,
|
||||
SubValue: docvalues.OrValue{
|
||||
Values: []docvalues.DeprecatedValue{
|
||||
docvalues.KeyEnumAssignmentValue{
|
||||
Values: assignOption,
|
||||
ValueIsOptional: false,
|
||||
Separator: "=",
|
||||
},
|
||||
docvalues.EnumValue{
|
||||
EnforceValues: true,
|
||||
Values: options,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
var MountOptionsExtractor = func(value string) string {
|
||||
separatorIndex := strings.Index(value, "=")
|
||||
|
||||
@ -339,31 +364,6 @@ Added in version 233.`,
|
||||
): docvalues.StringValue{},
|
||||
}
|
||||
|
||||
func createMountOptionField(
|
||||
options []docvalues.EnumString,
|
||||
assignOption map[docvalues.EnumString]docvalues.DeprecatedValue,
|
||||
) docvalues.DeprecatedValue {
|
||||
// dynamicOptions := docvalues.MergeKeyEnumAssignmentMaps(defaultAssignOptions, assignOption)
|
||||
|
||||
return docvalues.ArrayValue{
|
||||
Separator: ",",
|
||||
DuplicatesExtractor: &MountOptionsExtractor,
|
||||
SubValue: docvalues.OrValue{
|
||||
Values: []docvalues.DeprecatedValue{
|
||||
docvalues.KeyEnumAssignmentValue{
|
||||
Values: assignOption,
|
||||
ValueIsOptional: false,
|
||||
Separator: "=",
|
||||
},
|
||||
docvalues.EnumValue{
|
||||
EnforceValues: true,
|
||||
Values: options,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type optionField struct {
|
||||
Assignable map[docvalues.EnumString]docvalues.DeprecatedValue
|
||||
Enums []docvalues.EnumString
|
||||
@ -376,6 +376,10 @@ var MountOptionsMapField = map[string]optionField{
|
||||
Enums: commondocumentation.AdfsDocumentationEnums,
|
||||
Assignable: commondocumentation.AdfsDocumentationAssignable,
|
||||
},
|
||||
"apfs": {
|
||||
Enums: commondocumentation.APFSDocumentationEnums,
|
||||
Assignable: commondocumentation.APFSDocumentationAssignable,
|
||||
},
|
||||
"affs": {
|
||||
Enums: commondocumentation.AffsDocumentationEnums,
|
||||
Assignable: commondocumentation.AffsDocumentationAssignable,
|
||||
@ -470,8 +474,16 @@ var MountOptionsMapField = map[string]optionField{
|
||||
Enums: commondocumentation.UmsdosDocumentationEnums,
|
||||
Assignable: commondocumentation.UmsdosDocumentationAssignable,
|
||||
},
|
||||
"vboxsf": {
|
||||
Enums: commondocumentation.VboxsfDocumentationEnums,
|
||||
Assignable: commondocumentation.VboxsfDocumentationAssignable,
|
||||
},
|
||||
"vfat": {
|
||||
Enums: commondocumentation.VfatDocumentationEnums,
|
||||
Assignable: commondocumentation.VfatDocumentationAssignable,
|
||||
},
|
||||
"bcachefs": {
|
||||
Enums: commondocumentation.BcacheFSDocumentationEnums,
|
||||
Assignable: commondocumentation.BcacheFSDocumentationAssignable,
|
||||
},
|
||||
}
|
||||
|
@@ -1,26 +0,0 @@
package fields

import docvalues "config-lsp/doc-values"

var PassField = docvalues.OrValue{
	Values: []docvalues.DeprecatedValue{
		docvalues.EnumValue{
			EnforceValues: false,
			Values: []docvalues.EnumString{
				docvalues.CreateEnumStringWithDoc(
					"0",
					"Defaults to zero (don’t check the filesystem) if not present.",
				),
				docvalues.CreateEnumStringWithDoc(
					"1",
					"The root filesystem should be specified with a fs_passno of 1.",
				),
				docvalues.CreateEnumStringWithDoc(
					"2",
					"Other filesystems [than the root filesystem] should have a fs_passno of 2.",
				),
			},
		},
		docvalues.NumberValue{},
	},
}
@@ -6,7 +6,8 @@ import (
)

var UuidField = docvalues.RegexValue{
	Regex: *regexp.MustCompile(`[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}`),
	// Can either be a UUID or UID
	Regex: *regexp.MustCompile(`(?i)([a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}|[a-f0-9]{4}-[a-f0-9]{4})`),
}
var LabelField = docvalues.RegexValue{
	Regex: *regexp.MustCompile(`\S+`),
@@ -15,7 +16,8 @@ var LabelField = docvalues.RegexValue{
var SpecField = docvalues.OrValue{
	Values: []docvalues.DeprecatedValue{
		docvalues.PathValue{
			RequiredType: docvalues.PathTypeExistenceOptional,
			IsOptional:   false,
			RequiredType: docvalues.PathTypeFile,
		},
		docvalues.KeyEnumAssignmentValue{
			Separator: "=",
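Aside (not part of this commit): the widened pattern above now accepts both a full UUID and the short XXXX-XXXX volume IDs used by FAT filesystems, case-insensitively. A quick check with invented values:

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the new UuidField regex above.
var uuidOrUID = regexp.MustCompile(`(?i)([a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}|[a-f0-9]{4}-[a-f0-9]{4})`)

func main() {
	for _, value := range []string{
		"12345678-1234-1234-1234-123456789012", // full UUID: matches
		"ABCD-1234",                            // short volume ID: matches
		"zzzz-zzzz",                            // not hexadecimal: no match
	} {
		fmt.Printf("%-40s matches=%v\n", value, uuidOrUID.MatchString(value))
	}
}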
@ -4,7 +4,9 @@ import (
|
||||
"config-lsp/common"
|
||||
"config-lsp/handlers/fstab/ast"
|
||||
"config-lsp/handlers/fstab/fields"
|
||||
"config-lsp/utils"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/tliron/glsp/protocol_3_16"
|
||||
)
|
||||
@ -42,39 +44,29 @@ func GetCompletion(
|
||||
fileSystemType := entry.Fields.FilesystemType.Value.Value
|
||||
completions := make([]protocol.CompletionItem, 0, 50)
|
||||
|
||||
for _, completion := range fields.DefaultMountOptionsField.DeprecatedFetchCompletions(line, cursor) {
|
||||
var documentation string
|
||||
optionsValue := entry.FetchMountOptionsField(false)
|
||||
|
||||
switch completion.Documentation.(type) {
|
||||
case string:
|
||||
documentation = completion.Documentation.(string)
|
||||
case *string:
|
||||
documentation = *completion.Documentation.(*string)
|
||||
}
|
||||
if optionsValue != nil {
|
||||
for _, completion := range optionsValue.DeprecatedFetchCompletions(line, cursor) {
|
||||
var documentation string
|
||||
|
||||
completion.Documentation = protocol.MarkupContent{
|
||||
Kind: protocol.MarkupKindMarkdown,
|
||||
Value: documentation + "\n\n" + "From: _Default Mount Options_",
|
||||
switch completion.Documentation.(type) {
|
||||
case string:
|
||||
documentation = completion.Documentation.(string)
|
||||
case *string:
|
||||
documentation = *completion.Documentation.(*string)
|
||||
}
|
||||
|
||||
completion.Documentation = protocol.MarkupContent{
|
||||
Kind: protocol.MarkupKindMarkdown,
|
||||
Value: documentation + "\n\n" + fmt.Sprintf("From: _%s_", fileSystemType),
|
||||
}
|
||||
completions = append(completions, completion)
|
||||
}
|
||||
completions = append(completions, completion)
|
||||
}
|
||||
|
||||
for _, completion := range entry.FetchMountOptionsField(false).DeprecatedFetchCompletions(line, cursor) {
|
||||
var documentation string
|
||||
|
||||
switch completion.Documentation.(type) {
|
||||
case string:
|
||||
documentation = completion.Documentation.(string)
|
||||
case *string:
|
||||
documentation = *completion.Documentation.(*string)
|
||||
}
|
||||
|
||||
completion.Documentation = protocol.MarkupContent{
|
||||
Kind: protocol.MarkupKindMarkdown,
|
||||
Value: documentation + "\n\n" + fmt.Sprintf("From: _%s_", fileSystemType),
|
||||
}
|
||||
completions = append(completions, completion)
|
||||
}
|
||||
// Add defaults
|
||||
completions = append(completions, fields.DefaultMountOptionsField.DeprecatedFetchCompletions(line, cursor)...)
|
||||
|
||||
return completions, nil
|
||||
case ast.FstabFieldFreq:
|
||||
@ -84,13 +76,21 @@ func GetCompletion(
|
||||
value,
|
||||
cursor,
|
||||
), nil
|
||||
case ast.FstabFieldPass:
|
||||
value, cursor := getFieldSafely(entry.Fields.Pass, cursor)
|
||||
case ast.FstabFieldFsck:
|
||||
value, cursor := getFieldSafely(entry.Fields.Fsck, cursor)
|
||||
|
||||
return fields.PassField.DeprecatedFetchCompletions(
|
||||
value,
|
||||
cursor,
|
||||
), nil
|
||||
if entry.Fields.FilesystemType != nil &&
|
||||
utils.KeyExists(fields.FsckOneDisabledFilesystems, strings.ToLower(entry.Fields.FilesystemType.Value.Value)) {
|
||||
return fields.FsckFieldWhenDisabledFilesystems.DeprecatedFetchCompletions(
|
||||
value,
|
||||
cursor,
|
||||
), nil
|
||||
} else {
|
||||
return fields.FsckField.DeprecatedFetchCompletions(
|
||||
value,
|
||||
cursor,
|
||||
), nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
|
@ -42,7 +42,7 @@ func GetHoverInfo(
|
||||
return &hover, nil
|
||||
case ast.FstabFieldFreq:
|
||||
return &FreqHoverField, nil
|
||||
case ast.FstabFieldPass:
|
||||
case ast.FstabFieldFsck:
|
||||
return &PassHoverField, nil
|
||||
}
|
||||
|
||||
|
85
server/handlers/fstab/handlers/signature_help.go
Normal file
@@ -0,0 +1,85 @@
package handlers

import (
	"config-lsp/common"
	"config-lsp/handlers/fstab/ast"

	protocol "github.com/tliron/glsp/protocol_3_16"
)

func GetEntrySignatureHelp(
	entry *ast.FstabEntry,
	cursor common.CursorPosition,
) *protocol.SignatureHelp {
	var index uint32

	if entry == nil || entry.Fields.Spec == nil || entry.Fields.Spec.ContainsPosition(cursor) {
		index = 0
	} else if entry.Fields.MountPoint == nil && entry.Fields.MountPoint.ContainsPosition(cursor) {
		index = 1
	} else if entry.Fields.FilesystemType == nil && entry.Fields.FilesystemType.ContainsPosition(cursor) {
		index = 2
	} else if entry.Fields.Options == nil || entry.Fields.Options.ContainsPosition(cursor) {
		index = 3
	} else if entry.Fields.Freq == nil || entry.Fields.Freq.ContainsPosition(cursor) {
		index = 4
	} else {
		index = 5
	}

	signature := uint32(0)

	return &protocol.SignatureHelp{
		ActiveSignature: &signature,
		Signatures: []protocol.SignatureInformation{
			{
				Label:           "<spec> <mount point> <file system type> <options> <freq> <pass>",
				ActiveParameter: &index,
				Parameters: []protocol.ParameterInformation{
					{
						Label: []uint32{
							0,
							uint32(len("<spec>")),
						},
						Documentation: "The device or remote filesystem to mount",
					},
					{
						Label: []uint32{
							uint32(len("<spec>")),
							uint32(len("<spec> ") + len("<mount point>")),
						},
						Documentation: "The directory to mount the device or remote filesystem",
					},
					{
						Label: []uint32{
							uint32(len("<spec> <mount point>")),
							uint32(len("<spec> <mount point> ") + len("<file system type>")),
						},
						Documentation: "The type of filesystem",
					},
					{
						Label: []uint32{
							uint32(len("<spec> <mount point> <file system type>")),
							uint32(len("<spec> <mount point> <file system type> ") + len("<options>")),
						},
						Documentation: "Mount options",
					},
					{
						Label: []uint32{
							uint32(len("<spec> <mount point> <file system type> <options>")),
							uint32(len("<spec> <mount point> <file system type> <options> ") + len("<freq>")),
						},
						Documentation: "Used by dump(8) to determine which filesystems need to be dumped",
					},
					{
						Label: []uint32{
							uint32(len("<spec> <mount point> <file system type> <options> <freq>")),
							uint32(len("<spec> <mount point> <file system type> <options> <freq> ") + len("<pass>")),
						},
						Documentation: "Used by fsck(8) to determine the order in which filesystem checks are done at boot time",
					},
				},
			},
		},
	}
}
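Aside (not part of this commit): the index computed above selects which of the six fstab columns the client highlights; for example, with the cursor inside the options column of an entry, index resolves to 3 and the <options> span of the signature label is marked as the active parameter.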
31
server/handlers/fstab/lsp/text-document-signature-help.go
Normal file
@@ -0,0 +1,31 @@
package lsp

import (
	"config-lsp/common"
	"config-lsp/handlers/fstab/ast"
	"config-lsp/handlers/fstab/handlers"
	fstab "config-lsp/handlers/fstab/shared"

	"github.com/tliron/glsp"
	protocol "github.com/tliron/glsp/protocol_3_16"
)

func TextDocumentSignatureHelp(context *glsp.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) {
	document := fstab.DocumentParserMap[params.TextDocument.URI]

	line := uint32(params.Position.Line)
	cursor := common.LSPCharacterAsCursorPosition(params.Position.Character)

	if _, found := document.Config.CommentLines[line]; found {
		// Comment
		return nil, nil
	}

	entry, found := document.Config.Entries.Get(line)

	if !found {
		return handlers.GetEntrySignatureHelp(nil, cursor), nil
	} else {
		return handlers.GetEntrySignatureHelp(entry.(*ast.FstabEntry), cursor), nil
	}
}
@ -1,4 +1,4 @@
|
||||
// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser // Hosts
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser // Hosts
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser // Hosts
|
||||
|
||||
|
58
server/handlers/hosts/handlers/signature_help.go
Normal file
@@ -0,0 +1,58 @@
package handlers

import (
	"config-lsp/common"
	"config-lsp/handlers/hosts/ast"

	protocol "github.com/tliron/glsp/protocol_3_16"
)

func GetEntrySignatureHelp(
	entry *ast.HostsEntry,
	cursor common.CursorPosition,
) *protocol.SignatureHelp {
	var index uint32

	if entry == nil || entry.IPAddress == nil || entry.IPAddress.Location.ContainsPosition(cursor) {
		index = 0
	} else if entry.Hostname == nil || entry.Hostname.Location.ContainsPosition(cursor) {
		index = 1
	} else {
		index = 2
	}

	signature := uint32(0)

	return &protocol.SignatureHelp{
		ActiveSignature: &signature,
		Signatures: []protocol.SignatureInformation{
			{
				Label:           "<ip address> <hostname> [<alias>...]",
				ActiveParameter: &index,
				Parameters: []protocol.ParameterInformation{
					{
						Label: []uint32{
							0,
							uint32(len("<ip address>")),
						},
						Documentation: "The ip address to forward to",
					},
					{
						Label: []uint32{
							uint32(len("<ip address>")),
							uint32(len("<ip address> ") + len("<hostname>")),
						},
						Documentation: "The hostname to forward to",
					},
					{
						Label: []uint32{
							uint32(len("<ip address> ") + len("<hostname>")),
							uint32(len("<ip address> ") + len("<hostname> ") + len("[<alias>...]")),
						},
						Documentation: "An optional list of aliases that can also forward",
					},
				},
			},
		},
	}
}
31
server/handlers/hosts/lsp/text-document-signature-help.go
Normal file
@@ -0,0 +1,31 @@
package lsp

import (
	"config-lsp/common"
	"config-lsp/handlers/hosts"
	"config-lsp/handlers/hosts/ast"
	"config-lsp/handlers/hosts/handlers"

	"github.com/tliron/glsp"
	protocol "github.com/tliron/glsp/protocol_3_16"
)

func TextDocumentSignatureHelp(context *glsp.Context, params *protocol.SignatureHelpParams) (*protocol.SignatureHelp, error) {
	document := hosts.DocumentParserMap[params.TextDocument.URI]

	line := uint32(params.Position.Line)
	cursor := common.LSPCharacterAsCursorPosition(params.Position.Character)

	if _, found := document.Parser.CommentLines[line]; found {
		// Comment
		return nil, nil
	}

	entry, found := document.Parser.Tree.Entries.Get(line)

	if !found {
		return handlers.GetEntrySignatureHelp(nil, cursor), nil
	} else {
		return handlers.GetEntrySignatureHelp(entry.(*ast.HostsEntry), cursor), nil
	}
}
@ -38,27 +38,43 @@ func checkOption(
|
||||
option *ast.SSHOption,
|
||||
block ast.SSHBlock,
|
||||
) {
|
||||
if option.Key == nil {
|
||||
return
|
||||
}
|
||||
|
||||
///// General checks
|
||||
checkIsUsingDoubleQuotes(ctx, option.Key.Value, option.Key.LocationRange)
|
||||
checkQuotesAreClosed(ctx, option.Key.Value, option.Key.LocationRange)
|
||||
|
||||
docOption, found := fields.Options[option.Key.Key]
|
||||
if option.Separator == nil || option.Separator.Value.Value == "" {
|
||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||
Range: option.Key.LocationRange.ToLSPRange(),
|
||||
Message: "There should be a separator between an option and its value",
|
||||
Severity: &common.SeverityError,
|
||||
})
|
||||
} else {
|
||||
checkIsUsingDoubleQuotes(ctx, option.Separator.Value, option.Separator.LocationRange)
|
||||
checkQuotesAreClosed(ctx, option.Separator.Value, option.Separator.LocationRange)
|
||||
}
|
||||
|
||||
if !found {
|
||||
///// Check if the key is valid
|
||||
docOption, optionFound := fields.Options[option.Key.Key]
|
||||
|
||||
if !optionFound {
|
||||
// Diagnostics will be handled by `values.go`
|
||||
return
|
||||
}
|
||||
|
||||
// Check for values that are not allowed in Host blocks
|
||||
if block != nil && block.GetBlockType() == ast.SSHBlockTypeHost {
|
||||
if utils.KeyExists(fields.HostDisallowedOptions, option.Key.Key) {
|
||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||
Range: option.Key.LocationRange.ToLSPRange(),
|
||||
Message: fmt.Sprintf("Option '%s' is not allowed in Host blocks", option.Key.Key),
|
||||
Severity: &common.SeverityError,
|
||||
})
|
||||
}
|
||||
if block != nil && block.GetBlockType() == ast.SSHBlockTypeHost && utils.KeyExists(fields.HostDisallowedOptions, option.Key.Key) {
|
||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||
Range: option.Key.LocationRange.ToLSPRange(),
|
||||
Message: fmt.Sprintf("Option '%s' is not allowed in Host blocks", option.Key.Key),
|
||||
Severity: &common.SeverityError,
|
||||
})
|
||||
}
|
||||
|
||||
///// Check if the value is valid
|
||||
if option.OptionValue != nil {
|
||||
checkIsUsingDoubleQuotes(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
|
||||
checkQuotesAreClosed(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
|
||||
@ -66,8 +82,7 @@ func checkOption(
|
||||
invalidValues := docOption.DeprecatedCheckIsValid(option.OptionValue.Value.Value)
|
||||
|
||||
for _, invalidValue := range invalidValues {
|
||||
err := docvalues.LSPErrorFromInvalidValue(option.Start.Line, *invalidValue)
|
||||
err.ShiftCharacter(option.OptionValue.Start.Character)
|
||||
err := docvalues.LSPErrorFromInvalidValue(option.Start.Line, *invalidValue).ShiftCharacter(option.OptionValue.Start.Character)
|
||||
|
||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||
Range: err.Range.ToLSPRange(),
|
||||
@ -76,17 +91,6 @@ func checkOption(
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
if option.Separator == nil || option.Separator.Value.Value == "" {
|
||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||
Range: option.Key.LocationRange.ToLSPRange(),
|
||||
Message: fmt.Sprintf("There should be a separator between an option and its value"),
|
||||
Severity: &common.SeverityError,
|
||||
})
|
||||
} else {
|
||||
checkIsUsingDoubleQuotes(ctx, option.Separator.Value, option.Separator.LocationRange)
|
||||
checkQuotesAreClosed(ctx, option.Separator.Value, option.Separator.LocationRange)
|
||||
}
|
||||
}
|
||||
|
||||
func checkBlock(
|
||||
|
@ -9,33 +9,24 @@ import (
|
||||
protocol "github.com/tliron/glsp/protocol_3_16"
|
||||
)
|
||||
|
||||
func analyzeQuotesAreValid(
|
||||
ctx *analyzerContext,
|
||||
) {
|
||||
for _, info := range ctx.document.Config.GetAllOptions() {
|
||||
checkIsUsingDoubleQuotes(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
|
||||
checkIsUsingDoubleQuotes(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
|
||||
|
||||
checkQuotesAreClosed(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
|
||||
checkQuotesAreClosed(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
|
||||
}
|
||||
}
|
||||
|
||||
func checkIsUsingDoubleQuotes(
|
||||
ctx *analyzerContext,
|
||||
value commonparser.ParsedString,
|
||||
valueRange common.LocationRange,
|
||||
) {
|
||||
quoteRanges := utils.GetQuoteRanges(value.Raw)
|
||||
singleQuotePosition := strings.Index(value.Raw, "'")
|
||||
invertedRanges := quoteRanges.GetInvertedRanges(len(value.Raw))
|
||||
|
||||
// Single quote
|
||||
if singleQuotePosition != -1 && !quoteRanges.IsIndexInsideQuotes(singleQuotePosition) {
|
||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||
Range: valueRange.ToLSPRange(),
|
||||
Message: "ssh_config does not support single quotes. Use double quotes (\") instead.",
|
||||
Severity: &common.SeverityError,
|
||||
})
|
||||
for _, rang := range invertedRanges {
|
||||
text := value.Raw[rang[0]:rang[1]]
|
||||
|
||||
if strings.Contains(text, "'") {
|
||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||
Range: valueRange.ToLSPRange(),
|
||||
Message: "ssh_config does not support single quotes. Use double quotes (\") instead.",
|
||||
Severity: &common.SeverityError,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -7,6 +7,18 @@ import (
|
||||
protocol "github.com/tliron/glsp/protocol_3_16"
|
||||
)
|
||||
|
||||
func testQuotes(
|
||||
ctx *analyzerContext,
|
||||
) {
|
||||
for _, info := range ctx.document.Config.GetAllOptions() {
|
||||
checkIsUsingDoubleQuotes(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
|
||||
checkIsUsingDoubleQuotes(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
|
||||
|
||||
checkQuotesAreClosed(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
|
||||
checkQuotesAreClosed(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
|
||||
}
|
||||
}
|
||||
|
||||
func TestSimpleInvalidQuotesExample(
|
||||
t *testing.T,
|
||||
) {
|
||||
@ -17,7 +29,7 @@ PermitRootLogin 'yes'
|
||||
document: d,
|
||||
diagnostics: make([]protocol.Diagnostic, 0),
|
||||
}
|
||||
analyzeQuotesAreValid(ctx)
|
||||
testQuotes(ctx)
|
||||
|
||||
if !(len(ctx.diagnostics) == 1) {
|
||||
t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))
|
||||
@ -34,7 +46,7 @@ func TestSingleQuotesKeyAndOptionExample(
|
||||
document: d,
|
||||
diagnostics: make([]protocol.Diagnostic, 0),
|
||||
}
|
||||
analyzeQuotesAreValid(ctx)
|
||||
testQuotes(ctx)
|
||||
|
||||
if !(len(ctx.diagnostics) == 2) {
|
||||
t.Errorf("Expected 2 ctx.diagnostics, got %v", len(ctx.diagnostics))
|
||||
@ -51,7 +63,7 @@ PermitRootLogin "yes
|
||||
document: d,
|
||||
diagnostics: make([]protocol.Diagnostic, 0),
|
||||
}
|
||||
analyzeQuotesAreValid(ctx)
|
||||
testQuotes(ctx)
|
||||
|
||||
if !(len(ctx.diagnostics) == 1) {
|
||||
t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))
|
||||
@ -68,7 +80,7 @@ func TestIncompleteQuotesExample(
|
||||
document: d,
|
||||
diagnostics: make([]protocol.Diagnostic, 0),
|
||||
}
|
||||
analyzeQuotesAreValid(ctx)
|
||||
testQuotes(ctx)
|
||||
|
||||
if !(len(ctx.diagnostics) == 1) {
|
||||
t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))
|
||||
@ -101,7 +113,8 @@ func TestValidDependentOptionsExample(
|
||||
d := testutils_test.DocumentFromInput(t, `
|
||||
Port 1234
|
||||
CanonicalizeHostname yes
|
||||
CanonicalDomains example.com
|
||||
CanonicalDomains "example.com"
|
||||
Test "hello world 'test' "
|
||||
`)
|
||||
ctx := &analyzerContext{
|
||||
document: d,
|
||||
|
@ -8,6 +8,7 @@ import (
|
||||
func analyzeValuesAreValid(
|
||||
ctx *analyzerContext,
|
||||
) {
|
||||
// Check if there are unknown options
|
||||
for _, info := range ctx.document.Config.GetAllOptions() {
|
||||
option := info.Option
|
||||
block := info.Block
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser // Config
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser // Config
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
|
||||
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
|
||||
|
||||
package parser // Config
|
||||
|
||||
|
@ -139,6 +139,7 @@ rsa-sha2-512,rsa-sha2-256
|
||||
Arguments to CertificateFile may use the tilde syntax to refer to a user's home directory, the tokens described in the TOKENS section and environment variables as described in the ENVIRONMENT VARIABLES section.
|
||||
It is possible to have multiple certificate files specified in configuration files; these certificates will be tried in sequence. Multiple CertificateFile directives will add to the list of certificates used for authentication.`,
|
||||
Value: docvalues.PathValue{
|
||||
IsOptional: true,
|
||||
RequiredType: docvalues.PathTypeFile,
|
||||
},
|
||||
},
|
||||
@ -366,6 +367,7 @@ aes128-gcm@openssh.com,aes256-gcm@openssh.com
|
||||
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
|
||||
RespectQuotes: true,
|
||||
SubValue: docvalues.PathValue{
|
||||
IsOptional: true,
|
||||
RequiredType: docvalues.PathTypeFile,
|
||||
},
|
||||
},
|
||||
@ -834,6 +836,7 @@ rsa-sha2-512,rsa-sha2-256
|
||||
Documentation: `Specifies a path to a library that will be used when loading any FIDO authenticator-hosted keys, overriding the default of using the built-in USB HID support.
|
||||
If the specified value begins with a ‘$’ character, then it will be treated as an environment variable containing the path to the library.`,
|
||||
Value: docvalues.PathValue{
|
||||
IsOptional: false,
|
||||
RequiredType: docvalues.PathTypeFile,
|
||||
},
|
||||
},
|
||||
@ -963,6 +966,7 @@ rsa-sha2-512,rsa-sha2-256
|
||||
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
|
||||
RespectQuotes: true,
|
||||
SubValue: docvalues.PathValue{
|
||||
IsOptional: true,
|
||||
RequiredType: docvalues.PathTypeFile,
|
||||
},
|
||||
},
|
||||
@ -986,6 +990,7 @@ rsa-sha2-512,rsa-sha2-256
|
||||
"xauthlocation": {
|
||||
Documentation: `Specifies the full pathname of the xauth(1) program. The default is /usr/X11R6/bin/xauth.`,
|
||||
Value: docvalues.PathValue{
|
||||
IsOptional: false,
|
||||
RequiredType: docvalues.PathTypeFile,
|
||||
},
|
||||
},
|
||||
|
@@ -2,8 +2,6 @@ package handlers

import (
sshconfig "config-lsp/handlers/ssh_config"
"config-lsp/handlers/ssh_config/diagnostics"
"fmt"

protocol "github.com/tliron/glsp/protocol_3_16"
)
@@ -12,49 +10,12 @@ func FetchCodeActions(
d *sshconfig.SSHDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
line := params.Range.Start.Line

if d.Indexes == nil {
return nil
}

if unknownOption, found := d.Indexes.UnknownOptions[line]; found {
var blockLine *uint32
actions := getAddToUnknownCodeAction(d, params)
actions = append(actions, getKeywordTypoFixes(d, params)...)

if unknownOption.Block != nil {
blockLineValue := uint32(unknownOption.Block.GetLocation().Start.Line)
blockLine = &blockLineValue
}

commandID := "sshconfig." + CodeActionAddToUnknown
command := protocol.Command{
Title: fmt.Sprintf("Add %s to unknown options", unknownOption.Option.Key.Key),
Command: string(commandID),
Arguments: []any{
codeActionAddToUnknownArgs{
URI: params.TextDocument.URI,
OptionLine: unknownOption.Option.Start.Line,
BlockLine: blockLine,
},
},
}
kind := protocol.CodeActionKindQuickFix
codeAction := &protocol.CodeAction{
Title: fmt.Sprintf("Add %s to unknown options", unknownOption.Option.Key.Key),
Command: &command,
Kind: &kind,
Diagnostics: []protocol.Diagnostic{
diagnostics.GenerateUnknownOption(
unknownOption.Option.Key.ToLSPRange(),
unknownOption.Option.Key.Value.Value,
),
},
}

return []protocol.CodeAction{
*codeAction,
}
}

return nil
return actions
}
@@ -0,0 +1,56 @@
package handlers

import (
sshconfig "config-lsp/handlers/ssh_config"
"config-lsp/handlers/ssh_config/diagnostics"
"fmt"

protocol "github.com/tliron/glsp/protocol_3_16"
)

func getAddToUnknownCodeAction(
d *sshconfig.SSHDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
line := params.Range.Start.Line

if unknownOption, found := d.Indexes.UnknownOptions[line]; found {
var blockLine *uint32

if unknownOption.Block != nil {
blockLineValue := uint32(unknownOption.Block.GetLocation().Start.Line)
blockLine = &blockLineValue
}

commandID := "sshconfig." + CodeActionAddToUnknown
command := protocol.Command{
Title: fmt.Sprintf("Add %s to unknown options", unknownOption.Option.Key.Key),
Command: string(commandID),
Arguments: []any{
codeActionAddToUnknownArgs{
URI: params.TextDocument.URI,
OptionLine: unknownOption.Option.Start.Line,
BlockLine: blockLine,
},
},
}
kind := protocol.CodeActionKindQuickFix
codeAction := protocol.CodeAction{
Title: fmt.Sprintf("Add %s to unknown options", unknownOption.Option.Key.Key),
Command: &command,
Kind: &kind,
Diagnostics: []protocol.Diagnostic{
diagnostics.GenerateUnknownOption(
unknownOption.Option.Key.ToLSPRange(),
unknownOption.Option.Key.Value.Value,
),
},
}

return []protocol.CodeAction{
codeAction,
}
}

return nil
}
@@ -0,0 +1,64 @@
package handlers

import (
"config-lsp/common"
sshconfig "config-lsp/handlers/ssh_config"
"config-lsp/handlers/ssh_config/diagnostics"
"config-lsp/handlers/ssh_config/fields"
"config-lsp/utils"
"fmt"

protocol "github.com/tliron/glsp/protocol_3_16"
)

func getKeywordTypoFixes(
d *sshconfig.SSHDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
if common.ServerOptions.NoTypoSuggestions {
return nil
}

line := params.Range.Start.Line

if typoOption, found := d.Indexes.UnknownOptions[line]; found {
name := typoOption.Option.Key.Value.Value

opts := utils.KeysOfMap(fields.Options)
suggestedOptions := common.FindSimilarItems(fields.CreateNormalizedName(name), opts)

actions := make([]protocol.CodeAction, 0, len(suggestedOptions))

kind := protocol.CodeActionKindQuickFix
for index, normalizedOptionName := range suggestedOptions {
isPreferred := index == 0
optionName := fields.FieldsNameFormattedMap[normalizedOptionName]

actions = append(actions, protocol.CodeAction{
Title: fmt.Sprintf("Typo Fix: %s", optionName),
IsPreferred: &isPreferred,
Kind: &kind,
Diagnostics: []protocol.Diagnostic{
diagnostics.GenerateUnknownOption(
typoOption.Option.Key.ToLSPRange(),
typoOption.Option.Key.Value.Value,
),
},
Edit: &protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
params.TextDocument.URI: {
{
Range: typoOption.Option.Key.ToLSPRange(),
NewText: optionName,
},
},
},
},
})
}

return actions
}

return nil
}
@@ -33,7 +33,7 @@ func FormatDocument(
// it := d.Config.Options.Iterator()
// for it.Next() {
// line := it.Key().(uint32)
// entry := it.Value().(ast.SSHEntry)
// entry := it.Name().(ast.SSHEntry)
//
// if !(line >= textRange.Start.Line && line <= textRange.End.Line) {
// continue
@@ -20,6 +20,7 @@ func GetOptionSignatureHelp(
}

signature := uint32(0)

return &protocol.SignatureHelp{
ActiveSignature: &signature,
Signatures: []protocol.SignatureInformation{
@@ -37,7 +38,7 @@ func GetOptionSignatureHelp(
{
Label: []uint32{
uint32(len("<option>")),
uint32(len("<option>") + len("<value>") + 1),
uint32(len("<option> ") + len("<value>")),
},
Documentation: "The value for the option",
},
@@ -90,7 +91,7 @@ func GetMatchSignatureHelp(
{
Label: []uint32{
uint32(len("Host <criteria> ")),
uint32(len("Host <criteria> ") + len("<values>") + 1),
uint32(len("Host <criteria> ") + len("<values>")),
},
Documentation: "Values for the criteria",
},
@@ -132,14 +133,14 @@ func GetHostSignatureHelp(
{
Label: []uint32{
uint32(len("Host ")),
uint32(len("Host ") + len("<host1>") + 1),
uint32(len("Host ") + len("<host1>")),
},
Documentation: "A host that should match",
},
{
Label: []uint32{
uint32(len("Host <host1> ")),
uint32(len("Host <host1> ") + len("[<host2> ...]") + 1),
uint32(len("Host <host1> ") + len("[<host2> ...]")),
},
Documentation: "Additional (optional) hosts that should match",
},
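Per the LSP specification, parameter label offsets are an inclusive start and an exclusive end within the signature label, so the end offset should be len(prefix) + len(parameter); the dropped `+ 1` variant pointed one character past the label in the `<values>` case. A standalone sketch of the arithmetic (not repository code):

package main

import "fmt"

func main() {
	// Signature label used by the handler above: "Host <criteria> <values>".
	label := "Host <criteria> <values>"
	start := len("Host <criteria> ")                 // 16
	end := len("Host <criteria> ") + len("<values>") // 24, exactly len(label)
	fmt.Println(start, end, len(label))              // adding 1 would point past the end of the label
}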
@@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match

@@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser

@@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match

@@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match
@@ -129,8 +129,7 @@ func analyzeMatchValueIsValid(
invalidValues := docOption.DeprecatedCheckIsValid(value.Value.Raw)

for _, invalidValue := range invalidValues {
err := docvalues.LSPErrorFromInvalidValue(value.Start.Line, *invalidValue)
err.ShiftCharacter(value.Start.Character)
err := docvalues.LSPErrorFromInvalidValue(value.Start.Line, *invalidValue).ShiftCharacter(value.Start.Character)

ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: err.Range.ToLSPRange(),
@@ -4,7 +4,9 @@ import (
"config-lsp/common"
docvalues "config-lsp/doc-values"
"config-lsp/handlers/sshd_config/ast"
"config-lsp/handlers/sshd_config/diagnostics"
"config-lsp/handlers/sshd_config/fields"
"config-lsp/utils"
"fmt"

protocol "github.com/tliron/glsp/protocol_3_16"
@@ -20,7 +22,7 @@ func analyzeStructureIsValid(

switch entry.(type) {
case *ast.SSHDOption:
checkOption(ctx, entry.(*ast.SSHDOption), false)
checkOption(ctx, entry.(*ast.SSHDOption), nil)
case *ast.SSHDMatchBlock:
matchBlock := entry.(*ast.SSHDMatchBlock)
checkMatchBlock(ctx, matchBlock)
@@ -31,36 +33,52 @@ func analyzeStructureIsValid(
func checkOption(
ctx *analyzerContext,
option *ast.SSHDOption,
isInMatchBlock bool,
matchBlock *ast.SSHDMatchBlock,
) {
if option.Key == nil {
return
}

///// General checks
checkIsUsingDoubleQuotes(ctx, option.Key.Value, option.Key.LocationRange)
checkQuotesAreClosed(ctx, option.Key.Value, option.Key.LocationRange)

key := option.Key.Key
docOption, found := fields.Options[key]

if !found {
if option.Separator == nil || option.Separator.Value.Value == "" {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.ToLSPRange(),
Message: fmt.Sprintf("Unknown option: %s", option.Key.Key),
Range: option.Key.LocationRange.ToLSPRange(),
Message: "There should be a separator between an option and its value",
Severity: &common.SeverityError,
})
} else {
checkIsUsingDoubleQuotes(ctx, option.Separator.Value, option.Separator.LocationRange)
checkQuotesAreClosed(ctx, option.Separator.Value, option.Separator.LocationRange)
}

///// Check if the key is valid
docOption, optionFound := fields.Options[option.Key.Key]

if !optionFound {
ctx.diagnostics = append(ctx.diagnostics, diagnostics.GenerateUnknownOption(
option.Key.ToLSPRange(),
option.Key.Value.Value,
))
ctx.document.Indexes.UnknownOptions[option.Start.Line] = ast.SSHDOptionInfo{
Option: option,
MatchBlock: matchBlock,
}

// Since we don't know the option, we can't verify the value
return
}

if _, found := fields.MatchAllowedOptions[key]; !found && isInMatchBlock {
// Check for values that are not allowed in Match blocks
} else if matchBlock != nil && !utils.KeyExists(fields.MatchAllowedOptions, option.Key.Key) {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.ToLSPRange(),
Message: fmt.Sprintf("Option '%s' is not allowed inside Match blocks", option.Key.Key),
Message: fmt.Sprintf("Option '%s' is not allowed in Match blocks", option.Key.Key),
Severity: &common.SeverityError,
})
}

///// Check if the value is valid
if option.OptionValue != nil {
checkIsUsingDoubleQuotes(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
checkQuotesAreClosed(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
@@ -68,8 +86,7 @@ func checkOption(
invalidValues := docOption.DeprecatedCheckIsValid(option.OptionValue.Value.Value)

for _, invalidValue := range invalidValues {
err := docvalues.LSPErrorFromInvalidValue(option.Start.Line, *invalidValue)
err.ShiftCharacter(option.OptionValue.Start.Character)
err := docvalues.LSPErrorFromInvalidValue(option.Start.Line, *invalidValue).ShiftCharacter(option.OptionValue.Start.Character)

ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: err.Range.ToLSPRange(),
@@ -79,16 +96,6 @@ func checkOption(
}
}

if option.Separator == nil || option.Separator.Value.Value == "" {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.LocationRange.ToLSPRange(),
Message: fmt.Sprintf("There should be a separator between an option and its value"),
Severity: &common.SeverityError,
})
} else {
checkIsUsingDoubleQuotes(ctx, option.Separator.Value, option.Separator.LocationRange)
checkQuotesAreClosed(ctx, option.Separator.Value, option.Separator.LocationRange)
}
}

func checkMatchBlock(
@@ -100,6 +107,6 @@ func checkMatchBlock(
for it.Next() {
option := it.Value().(*ast.SSHDOption)

checkOption(ctx, option, true)
checkOption(ctx, option, matchBlock)
}
}
server/handlers/sshd_config/analyzer/options_test.go (new file, 34 lines)
@@ -0,0 +1,34 @@
package analyzer

import (
testutils_test "config-lsp/handlers/sshd_config/test_utils"
"testing"

protocol "github.com/tliron/glsp/protocol_3_16"
)

func TestUnknownOptionExample(
t *testing.T,
) {
d := testutils_test.DocumentFromInput(t, `
ThisOptionDoesNotExist okay
`)
ctx := &analyzerContext{
document: d,
diagnostics: make([]protocol.Diagnostic, 0),
}

analyzeStructureIsValid(ctx)

if !(len(ctx.diagnostics) == 1) {
t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))
}

if !(len(ctx.document.Indexes.UnknownOptions) == 1) {
t.Errorf("Expected 1 unknown option, got %v", len(ctx.document.Indexes.UnknownOptions))
}

if !(ctx.document.Indexes.UnknownOptions[0].Option.Key.Value.Value == "ThisOptionDoesNotExist") {
t.Errorf("Expected 'ThisOptionDoesNotExist', got %v", ctx.document.Indexes.UnknownOptions[0].Option.Key.Value.Value)
}
}
@@ -3,7 +3,6 @@ package analyzer
import (
"config-lsp/common"
commonparser "config-lsp/common/parser"
"config-lsp/utils"
"strings"

protocol "github.com/tliron/glsp/protocol_3_16"
@@ -12,12 +11,12 @@ import (
func analyzeQuotesAreValid(
ctx *analyzerContext,
) {
for _, option := range ctx.document.Config.GetAllOptions() {
checkIsUsingDoubleQuotes(ctx, option.Key.Value, option.Key.LocationRange)
checkIsUsingDoubleQuotes(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
for _, info := range ctx.document.Config.GetAllOptions() {
checkIsUsingDoubleQuotes(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
checkIsUsingDoubleQuotes(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)

checkQuotesAreClosed(ctx, option.Key.Value, option.Key.LocationRange)
checkQuotesAreClosed(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
checkQuotesAreClosed(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
checkQuotesAreClosed(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
}
}

@@ -26,14 +25,10 @@ func checkIsUsingDoubleQuotes(
value commonparser.ParsedString,
valueRange common.LocationRange,
) {
quoteRanges := utils.GetQuoteRanges(value.Raw)
singleQuotePosition := strings.Index(value.Raw, "'")

// Single quote
if singleQuotePosition != -1 && !quoteRanges.IsIndexInsideQuotes(singleQuotePosition) {
if strings.HasPrefix(value.Raw, "'") && strings.HasSuffix(value.Raw, "'") {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: valueRange.ToLSPRange(),
Message: "sshd_config does not support single quotes. Use double quotes (\") instead.",
Message: "ssh_config does not support single quotes. Use double quotes (\") instead.",
Severity: &common.SeverityError,
})
}
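The simplified check above only flags a value when it is fully wrapped in single quotes, instead of scanning for a stray single quote outside a double-quoted range. A standalone sketch of what it now matches (illustration only, not repository code):

package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, raw := range []string{"'yes'", "don't", `"yes"`} {
		// Mirrors the HasPrefix/HasSuffix condition used in checkIsUsingDoubleQuotes.
		flagged := strings.HasPrefix(raw, "'") && strings.HasSuffix(raw, "'")
		fmt.Printf("%q flagged=%v\n", raw, flagged)
	}
}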
@@ -13,13 +13,13 @@ import (
func analyzeTokens(
ctx *analyzerContext,
) {
for _, option := range ctx.document.Config.GetAllOptions() {
if option.Key == nil || option.OptionValue == nil {
for _, info := range ctx.document.Config.GetAllOptions() {
if info.Option.Key == nil || info.Option.OptionValue == nil {
continue
}

key := option.Key.Key
text := option.OptionValue.Value.Value
key := info.Option.Key.Key
text := info.Option.OptionValue.Value.Value
var tokens []string

if foundTokens, found := fields.OptionsTokensMap[key]; found {
@@ -39,7 +39,7 @@ func analyzeTokens(
}

ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.OptionValue.ToLSPRange(),
Range: info.Option.OptionValue.ToLSPRange(),
Message: fmt.Sprintf("Token '%s' is not allowed for option '%s'", token, optionName),
Severity: &common.SeverityError,
})
@@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Config

@@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser

@@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Config

@@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Config
server/handlers/sshd_config/ast/sshd_config_ast_utils.go (new file, 8 lines)
@@ -0,0 +1,8 @@
// Contains structs that are used as utilities, but are
// not used for the AST itself
package ast

type SSHDOptionInfo struct {
MatchBlock *SSHDMatchBlock
Option *SSHDOption
}
@@ -64,26 +64,32 @@ func (c SSHDConfig) FindOption(line uint32) (*SSHDOption, *SSHDMatchBlock) {
return nil, nil
}

func (c SSHDConfig) GetAllOptions() []*SSHDOption {
options := make(
[]*SSHDOption,
func (c SSHDConfig) GetAllOptions() []SSHDOptionInfo {
infos := make(
[]SSHDOptionInfo,
0,
// Approximation, this does not need to be exact
c.Options.Size()+10,
)

var currentMatchBlock *SSHDMatchBlock = nil

for _, rawEntry := range c.Options.Values() {
switch entry := rawEntry.(type) {
case *SSHDOption:
options = append(options, entry)
infos = append(infos, SSHDOptionInfo{
Option: entry,
MatchBlock: currentMatchBlock,
})
case *SSHDMatchBlock:
options = append(options, entry.MatchOption)
currentMatchBlock = entry

for _, rawOption := range entry.Options.Values() {
options = append(options, rawOption.(*SSHDOption))
}
infos = append(infos, SSHDOptionInfo{
Option: entry.MatchOption,
MatchBlock: currentMatchBlock,
})
}
}

return options
return infos
}
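The refactored GetAllOptions above returns SSHDOptionInfo values so every option carries the Match block it appears under. A compact, self-contained illustration of the same pattern with simplified stand-in types (not the repository's ast package):

package main

import "fmt"

// Simplified stand-ins for the real ast types.
type sshdOption struct{ key string }
type sshdMatchBlock struct{ matchOption *sshdOption }
type optionInfo struct {
	Option     *sshdOption
	MatchBlock *sshdMatchBlock
}

// getAllOptions walks an ordered list of entries and pairs every option with
// the Match block that is currently in effect (nil at the top level).
func getAllOptions(entries []any) []optionInfo {
	infos := make([]optionInfo, 0, len(entries))
	var current *sshdMatchBlock

	for _, raw := range entries {
		switch entry := raw.(type) {
		case *sshdOption:
			infos = append(infos, optionInfo{Option: entry, MatchBlock: current})
		case *sshdMatchBlock:
			current = entry
			infos = append(infos, optionInfo{Option: entry.matchOption, MatchBlock: current})
		}
	}

	return infos
}

func main() {
	entries := []any{
		&sshdOption{key: "Port"},
		&sshdMatchBlock{matchOption: &sshdOption{key: "Match"}},
	}
	for _, info := range getAllOptions(entries) {
		fmt.Println(info.Option.key, info.MatchBlock != nil) // "Port false", then "Match true"
	}
}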
server/handlers/sshd_config/diagnostics/diagnostics.go (new file, 19 lines)
@@ -0,0 +1,19 @@
package diagnostics

import (
"config-lsp/common"
"fmt"

protocol "github.com/tliron/glsp/protocol_3_16"
)

func GenerateUnknownOption(
diagnosticRange protocol.Range,
optionName string,
) protocol.Diagnostic {
return protocol.Diagnostic{
Range: diagnosticRange,
Message: fmt.Sprintf("Unknown option: %s", optionName),
Severity: &common.SeverityError,
}
}
@@ -83,31 +83,36 @@ See PATTERNS in ssh_config(5) for more information on patterns. This keyword may
},
},
docvalues.ArrayValue{
Separator: ",",
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
Separator: " ",
DuplicatesExtractor: nil,
RespectQuotes: true,
SubValue: docvalues.EnumValue{
EnforceValues: true,
Values: []docvalues.EnumString{
docvalues.CreateEnumString("none"),
SubValue: docvalues.ArrayValue{
Separator: ",",
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
RespectQuotes: true,
SubValue: docvalues.EnumValue{
EnforceValues: true,
Values: []docvalues.EnumString{
docvalues.CreateEnumString("none"),

docvalues.CreateEnumString("password"),
docvalues.CreateEnumString("publickey"),
docvalues.CreateEnumString("gssapi-with-mic"),
docvalues.CreateEnumString("keyboard-interactive"),
docvalues.CreateEnumString("hostbased"),
docvalues.CreateEnumString("password"),
docvalues.CreateEnumString("publickey"),
docvalues.CreateEnumString("gssapi-with-mic"),
docvalues.CreateEnumString("keyboard-interactive"),
docvalues.CreateEnumString("hostbased"),

docvalues.CreateEnumString("password:bsdauth"),
docvalues.CreateEnumString("publickey:bsdauth"),
docvalues.CreateEnumString("gssapi-with-mic:bsdauth"),
docvalues.CreateEnumString("keyboard-interactive:bsdauth"),
docvalues.CreateEnumString("hostbased:bsdauth"),
docvalues.CreateEnumString("password:bsdauth"),
docvalues.CreateEnumString("publickey:bsdauth"),
docvalues.CreateEnumString("gssapi-with-mic:bsdauth"),
docvalues.CreateEnumString("keyboard-interactive:bsdauth"),
docvalues.CreateEnumString("hostbased:bsdauth"),

docvalues.CreateEnumString("password:pam"),
docvalues.CreateEnumString("publickey:pam"),
docvalues.CreateEnumString("gssapi-with-mic:pam"),
docvalues.CreateEnumString("keyboard-interactive:pam"),
docvalues.CreateEnumString("hostbased:pam"),
docvalues.CreateEnumString("password:pam"),
docvalues.CreateEnumString("publickey:pam"),
docvalues.CreateEnumString("gssapi-with-mic:pam"),
docvalues.CreateEnumString("keyboard-interactive:pam"),
docvalues.CreateEnumString("hostbased:pam"),
},
},
},
},
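The nesting above switches the outer ArrayValue to a space separator and wraps the old comma-separated ArrayValue inside it, which mirrors how sshd treats AuthenticationMethods: one or more comma-separated method lists, separated by spaces (e.g. publickey,password publickey,keyboard-interactive). A standalone sketch of that two-level split (illustration only, not repository code):

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Outer split on spaces (alternative method lists), inner split on commas
	// (methods that must all succeed within one list).
	value := "publickey,password publickey,keyboard-interactive"
	for _, group := range strings.Fields(value) {
		fmt.Println(strings.Split(group, ","))
	}
}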
@@ -146,14 +151,26 @@ See PATTERNS in ssh_config(5) for more information on patterns. This keyword may
Documentation: `Specifies a file that lists principal names that are accepted for certificate authentication. When using certificates signed by a key listed in TrustedUserCAKeys, this file lists names, one of which must appear in the certificate for it to be accepted for authentication. Names are listed one per line preceded by key options (as described in “AUTHORIZED_KEYS FILE FORMAT” in sshd(8)). Empty lines and comments starting with ‘#’ are ignored.
Arguments to AuthorizedPrincipalsFile accept the tokens described in the “TOKENS” section. After expansion, AuthorizedPrincipalsFile is taken to be an absolute path or one relative to the user's home directory. The default is none, i.e. not to use a principals file – in this case, the username of the user must appear in a certificate's principals list for it to be accepted.
Note that AuthorizedPrincipalsFile is only used when authentication proceeds using a CA listed in TrustedUserCAKeys and is not consulted for certification authorities trusted via ~/.ssh/authorized_keys, though the principals= key option offers a similar facility (see sshd(8) for details).`,
Value: docvalues.PathValue{
RequiredType: docvalues.PathTypeFile,
Value: docvalues.OrValue{
Values: []docvalues.DeprecatedValue{
docvalues.SingleEnumValue("none"),
docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
},
},
"banner": {
Documentation: `The contents of the specified file are sent to the remote user before authentication is allowed. If the argument is none then no banner is displayed. By default, no banner is displayed.`,
Value: docvalues.PathValue{
RequiredType: docvalues.PathTypeFile,
Value: docvalues.OrValue{
Values: []docvalues.DeprecatedValue{
docvalues.SingleEnumValue("none"),
docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
},
},
"casignaturealgorithms": {
@@ -343,13 +360,19 @@ See PATTERNS in ssh_config(5) for more information on patterns. This keyword may
},
"hostcertificate": {
Documentation: `Specifies a file containing a public host certificate. The certificate's public key must match a private host key already specified by HostKey. The default behaviour of sshd(8) is not to load any certificates.`,
Value: docvalues.PathValue{},
Value: docvalues.PathValue{
IsOptional: true,
RequiredType: docvalues.PathTypeFile,
},
},
"hostkey": {
Documentation: `Specifies a file containing a private host key used by SSH. The defaults are /etc/ssh/ssh_host_ecdsa_key, /etc/ssh/ssh_host_ed25519_key and /etc/ssh/ssh_host_rsa_key.
Note that sshd(8) will refuse to use a file if it is group/world-accessible and that the HostKeyAlgorithms option restricts which of the keys are actually used by sshd(8).
It is possible to have multiple host key files. It is also possible to specify public host key files instead. In this case operations on the private key will be delegated to an ssh-agent(1).`,
Value: docvalues.PathValue{},
Value: docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
"hostkeyagent": {
Documentation: `Identifies the UNIX-domain socket used to communicate with an agent that has access to the private host keys. If the string "SSH_AUTH_SOCK" is specified, the location of the socket will be read from the SSH_AUTH_SOCK environment variable.`,
@@ -592,8 +615,9 @@ Only a subset of keywords may be used on the lines following a Match keyword. Av
},
},
"modulifile": {
Documentation: `Specifies the moduli(5) file that contains the Diffie- Hellman groups used for the “diffie-hellman-group-exchange-sha1” and “diffie-hellman-group-exchange-sha256” key exchange methods. The default is /etc/moduli.`,
Documentation: `Specifies the moduli(5) file that contains the Diffie-Hellman groups used for the “diffie-hellman-group-exchange-sha1” and “diffie-hellman-group-exchange-sha256” key exchange methods. The default is /etc/moduli.`,
Value: docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
@@ -859,6 +883,7 @@ Only a subset of keywords may be used on the lines following a Match keyword. Av
"securitykeyprovider": {
Documentation: `Specifies a path to a library that will be used when loading FIDO authenticator-hosted keys, overriding the default of using the built-in USB HID support.`,
Value: docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
@@ -8,4 +8,5 @@ var AllowedDuplicateOptions = map[NormalizedOptionName]struct{}{
"listenaddress": {},
"match": {},
"port": {},
"hostkey": {},
}
@@ -18,6 +18,10 @@ func GetRootCompletions(
parentMatchBlock *ast.SSHDMatchBlock,
suggestValue bool,
) ([]protocol.CompletionItem, error) {
if d.Indexes == nil {
return nil, nil
}

kind := protocol.CompletionItemKindField

availableOptions := make(map[fields.NormalizedOptionName]docvalues.DocumentationValue, 0)
server/handlers/sshd_config/handlers/fetch-code-actions.go (new file, 20 lines)
@@ -0,0 +1,20 @@
package handlers

import (
sshdconfig "config-lsp/handlers/sshd_config"

protocol "github.com/tliron/glsp/protocol_3_16"
)

func FetchCodeActions(
d *sshdconfig.SSHDDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
if d.Indexes == nil {
return nil
}

actions := getKeywordTypoFixes(d, params)

return actions
}
server/handlers/sshd_config/handlers/fetch-code-actions_typos.go (new file, 110 lines)
@@ -0,0 +1,110 @@
package handlers

import (
"config-lsp/common"
sshdconfig "config-lsp/handlers/sshd_config"
"config-lsp/handlers/sshd_config/diagnostics"
"config-lsp/handlers/sshd_config/fields"
"fmt"

"github.com/hbollon/go-edlib"
protocol "github.com/tliron/glsp/protocol_3_16"
)

func getKeywordTypoFixes(
d *sshdconfig.SSHDDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
if common.ServerOptions.NoTypoSuggestions {
return nil
}

line := params.Range.Start.Line

if typoOption, found := d.Indexes.UnknownOptions[line]; found {
name := typoOption.Option.Key.Value.Value

suggestedOptions := findSimilarOptions(name, typoOption.MatchBlock != nil)

actions := make([]protocol.CodeAction, 0, len(suggestedOptions))

kind := protocol.CodeActionKindQuickFix
for index, normalizedOptionName := range suggestedOptions {
isPreferred := index == 0
optionName := fields.FieldsNameFormattedMap[normalizedOptionName]

actions = append(actions, protocol.CodeAction{
Title: fmt.Sprintf("Typo Fix: %s", optionName),
IsPreferred: &isPreferred,
Kind: &kind,
Diagnostics: []protocol.Diagnostic{
diagnostics.GenerateUnknownOption(
typoOption.Option.Key.ToLSPRange(),
typoOption.Option.Key.Value.Value,
),
},
Edit: &protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
params.TextDocument.URI: {
{
Range: typoOption.Option.Key.ToLSPRange(),
NewText: optionName,
},
},
},
},
})
}

return actions
}

return nil
}

// Find options that are similar to the given option name.
// This is used to find typos & suggest the correct option name.
// Once an option is found that has a Damerau-Levenshtein distance of 1, it is immediately returned.
// If not, then the next 2 options of similarity 2, or 3 options of similarity 3 are returned.
// If no options with similarity <= 3 are found, then an empty slice is returned.
func findSimilarOptions(
optionName string,
restrictToMatchOptions bool,
) []fields.NormalizedOptionName {
normalizedOptionName := string(fields.CreateNormalizedName(optionName))

optionsPerSimilarity := map[uint8][]fields.NormalizedOptionName{
2: make([]fields.NormalizedOptionName, 0, 2),
3: make([]fields.NormalizedOptionName, 0, 3),
}

for name := range fields.Options {
if restrictToMatchOptions {
if _, found := fields.MatchAllowedOptions[name]; !found {
continue
}
}

normalizedName := string(name)
similarity := edlib.DamerauLevenshteinDistance(normalizedName, normalizedOptionName)

switch similarity {
case 1:
return []fields.NormalizedOptionName{name}
case 2:
optionsPerSimilarity[2] = append(optionsPerSimilarity[2], name)

if len(optionsPerSimilarity[2]) >= 2 {
return optionsPerSimilarity[2]
}
case 3:
optionsPerSimilarity[3] = append(optionsPerSimilarity[3], name)

if len(optionsPerSimilarity[3]) >= 3 {
return optionsPerSimilarity[3]
}
}
}

return append(optionsPerSimilarity[2], optionsPerSimilarity[3]...)
}
@@ -37,4 +37,6 @@ type SSHDIndexes struct {
AllOptionsPerName map[fields.NormalizedOptionName](map[*ast.SSHDMatchBlock]([]*ast.SSHDOption))

Includes map[uint32]*SSHDIndexIncludeLine

UnknownOptions map[uint32]ast.SSHDOptionInfo
}

@@ -18,6 +18,7 @@ func CreateIndexes(config ast.SSHDConfig) (*SSHDIndexes, []common.LSPError) {
indexes := &SSHDIndexes{
AllOptionsPerName: make(map[fields.NormalizedOptionName](map[*ast.SSHDMatchBlock]([]*ast.SSHDOption))),
Includes: make(map[uint32]*SSHDIndexIncludeLine),
UnknownOptions: make(map[uint32]ast.SSHDOptionInfo),
}

it := config.Options.Iterator()
server/handlers/sshd_config/lsp/text-document-code-action.go (new file, 16 lines)
@@ -0,0 +1,16 @@
package lsp

import (
sshdconfig "config-lsp/handlers/sshd_config"
"config-lsp/handlers/sshd_config/handlers"

"github.com/tliron/glsp"
protocol "github.com/tliron/glsp/protocol_3_16"
)

func TextDocumentCodeAction(context *glsp.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) {
d := sshdconfig.DocumentParserMap[params.TextDocument.URI]
actions := handlers.FetchCodeActions(d, params)

return actions, nil
}
@@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match

@@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser

@@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match
Some files were not shown because too many files have changed in this diff.