Compare commits


64 Commits
v0.1.3 ... main

Author SHA1 Message Date
Myzel394
66c93938fc
Merge pull request #45 from Myzel394/improve-ssh 2025-05-29 20:37:44 +02:00
e187040dd0
fix(ci-cd): Remove unused go version 2025-05-29 20:30:30 +02:00
ac97ec77ef
fix(server): Improve naming 2025-05-29 20:22:57 +02:00
a2decaeff3
fix(server): Remove print statement 2025-05-29 20:20:46 +02:00
d5ec3694db
chore(server): Update dependencies
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:36 +02:00
5c6ca95912
chore(server): Update go version
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:36 +02:00
6fe41b5040
feat(ci-cd): Check if code is up to date
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:36 +02:00
0dda74c8cb
chore(server): Move antlr parser updater to justfile
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:36 +02:00
d10655996f
chore(server): Update antlr parsers
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:36 +02:00
c578b4b448
chore(server): Update go version
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:35 +02:00
3c9ee9da53
fix(server): Update version; Small fixes
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:35 +02:00
429c2cd4be
fix(server): Improvements
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:35 +02:00
9a1686a7d8
fix(server): Improve ssh handlers
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:35 +02:00
fa45351ec5
fix(server): Fix SSH fields
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:35 +02:00
b520ada4ed
fix(server): Improve SSH fields
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-05-29 20:10:35 +02:00
Myzel394
9b306f339e
Merge pull request #40 from Myzel394/improve-wireguard
fix(server): Overall bugfixes & improvements
2025-03-29 21:21:52 +01:00
e1140ae757
chore: Update version 2025-03-29 19:48:04 +01:00
9f22689cac
fix(server): Improvements 2025-03-29 18:53:20 +01:00
e4d7521a4c
feat(server): Add Wireguard code actions: Fix typo 2025-03-29 16:12:28 +01:00
c5fefad56d
feat(server): Add apfs 2025-03-26 22:24:39 +01:00
ef625f9bf6
feat(flake): Add gopls 2025-03-24 10:45:45 +01:00
db4e1bae4c
fix(server): Overall bugfixes & improvements 2025-03-24 10:45:16 +01:00
5de2711b03 Merge branch 'main' into improve-wireguard 2025-03-23 17:35:35 +01:00
5e535741d2
fix(server): Improvements 2025-03-23 17:35:31 +01:00
ce7264aded
feat(server): fstab: Add bcachefs options 2025-03-23 17:34:15 +01:00
e69edeaece
fix(server): ValuePath: Do not check for errors if existence is optional 2025-03-22 16:15:02 +01:00
15ce5958da
feat(wireguard): Add code action: Create peer like this 2025-03-17 22:10:29 +01:00
3857bd5694
fix(server): Overall bugfixes & improvements 2025-03-17 22:00:47 +01:00
Myzel394
e2c2fac98c
Merge pull request #36 from Myzel394/server/add-suggested-words
Add suggestion words
2025-03-16 21:52:30 +01:00
25218161b9
fix(ci-cd): Use upload-artifact@4 2025-03-16 14:59:39 +01:00
1d7e746545
fix: Improvements 2025-03-16 14:56:42 +01:00
Myzel394
7feb034a84
fix(server): Overall improvements
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:41:23 +01:00
Myzel394
7377d952c8
feat(justfile): Add ready recipe
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:41:23 +01:00
15471894d4
fix: Fix 2025-03-16 00:25:05 +01:00
dependabot[bot]
8350458ae5
chore(deps): bump golang.org/x/crypto from 0.25.0 to 0.31.0 in /server
Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.25.0 to 0.31.0.
- [Commits](https://github.com/golang/crypto/compare/v0.25.0...v0.31.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
d81f978771
fix(server): Improve spec field analyzer
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
437985d72f
chore(ci-cd): Update .goreleaser.yaml config
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
00976cec95
feat: Add justfile
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
f29bb12d84
fix(server): Fix language initialization
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
1cfb9bbfba
refactor(server): ssh_config improvements
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
0b2690910f
fix(server): Improve aliases
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
b94d987565
fix(server): Improve wireguard hover lsp
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
e14866bcdc
fix(server): Improve wireguard
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
1227949f26
fix(server): Improve wireguard
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:55 +01:00
Myzel394
d3f6122eef
feat(server): Add FindBiggestKey utils
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:54 +01:00
Myzel394
ff9b5db18a
refactor(server): Refactor Wireguard config; Improve completions; Improve code actions
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:54 +01:00
Myzel394
a0dca94b9d
refactor(server): Refactor Wireguard config; Improve completions + bunch of other stuff
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:54 +01:00
Myzel394
020cc8ad67
refactor(server): Improve Wireguard indexes
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:54 +01:00
Myzel394
eb076dbf53
refactor(server): Improve Wireguard analyzer
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:54 +01:00
Myzel394
36950fe271
refactor(server): Improve Wireguard AST, analyzer and indexes
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:54 +01:00
Myzel394
ba056d6ae9
refactor(server): Improve Wireguard config; Improve the parser
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:54 +01:00
Myzel394
bf05d07fc9
fix(server): Improve structure analyzer for sshd_config
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:54 +01:00
Myzel394
98f76fd839
fix(server): Improve structure analyzer for ssh_config
Signed-off-by: Myzel394 <github.7a2op@simplelogin.co>
2025-03-16 00:23:54 +01:00
Myzel394
019726e28a
Merge pull request #37 from Myzel394/dependabot/go_modules/server/golang.org/x/crypto-0.31.0
chore(deps): bump golang.org/x/crypto from 0.25.0 to 0.31.0 in /server
2025-02-18 20:13:38 +01:00
dependabot[bot]
78ca195a9d
chore(deps): bump golang.org/x/crypto from 0.25.0 to 0.31.0 in /server
Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.25.0 to 0.31.0.
- [Commits](https://github.com/golang/crypto/compare/v0.25.0...v0.31.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-02-17 21:57:27 +00:00
Myzel394
706b8137dd
fix(server): Add code action LSP to root-handler 2025-02-17 22:28:09 +01:00
Myzel394
dbf543db66
fix(server): Improve ssh_config 2025-02-17 22:27:52 +01:00
Myzel394
e569516aae
fix(server): Improve sshd_config 2025-02-17 22:27:14 +01:00
Myzel394
0c827b04cd
feat(server): Improve sshd_config; Add unknown option detection 2025-02-17 21:50:56 +01:00
Myzel394
3ac3ebbe50
refactor(server): Outsource ssh_config code action: Add To Unknown into own file 2025-02-16 15:04:52 +01:00
Myzel394
026e0349a1
feat(server): Add typo fix suggestion to ssh_config 2025-02-16 14:43:14 +01:00
Myzel394
67c7f7f4b7
feat(server): Add NoTypoSuggestions to global options 2025-02-16 13:21:45 +01:00
Myzel394
5d03b4598c
chore: Update version 2025-02-14 23:26:17 +01:00
Myzel394
dd6bcc4301
fix(ci-cd): Add .exe suffix for Windows releases 2025-02-14 23:22:39 +01:00
137 changed files with 3344 additions and 2732 deletions

View File

@ -5,9 +5,6 @@ on: [pull_request]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
go-version: [ '1.22.x' ]
steps:
- uses: actions/checkout@v4
@ -16,9 +13,24 @@ jobs:
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
- name: Check if project can be linted
run: nix develop --command bash -c "just lint" && git diff --exit-code
- name: Check if antlr parsers are up to date
run: nix develop --command bash -c "just update-antlr-parsers" && git diff --exit-code
- name: Check Nix flake
run: nix flake check
- name: Build app
run: nix develop --command bash -c "cd server && go build"
- name: Build VS Code extension
run: nix build .#vs-code-extension
- name: Upload VS Code extension
uses: actions/upload-artifact@v4
with:
name: vs-code-extension
path: result/config-lsp-*.vsix

View File

@ -59,7 +59,7 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GH_CONFIGLSP_TOKEN }}
build-extension:
name: Build extension for ${{ matrix.target }}
name: Build extension for ${{ matrix.action_name }}
runs-on: ubuntu-latest
needs:
# Wait for server to build so that we know the checks have passed
@ -71,23 +71,29 @@ jobs:
- goos: linux
goarch: amd64
vscode_target: linux-x64
action_name: Linux x64
- goos: linux
goarch: arm64
vscode_target: linux-arm64
action_name: Linux ARM
- goos: darwin
goarch: amd64
vscode_target: darwin-x64
action_name: macOS x64
- goos: darwin
goarch: arm64
vscode_target: darwin-arm64
action_name: macOS ARM
- goos: windows
goarch: amd64
vscode_target: win32-x64
action_name: Windows x64
- goos: windows
goarch: arm64
vscode_target: win32-arm64
action_name: Windows ARM
steps:
- name: Checkout
@ -108,13 +114,21 @@ jobs:
run: nix build .#"vs-code-extension-bare"
- name: Build extension
if: ${{ matrix.goos != 'windows' }}
run: cd server && GOOS=${{ matrix.goos }} GOARCH=${{ matrix.goarch }} go build -a -gcflags=all="-l -B" -ldflags="-s -w" -o config-lsp
- name: Build extension
if: ${{ matrix.goos == 'windows' }}
run: cd server && GOOS=${{ matrix.goos }} GOARCH=${{ matrix.goarch }} go build -a -gcflags=all="-l -B" -ldflags="-s -w" -o config-lsp.exe
- name: Prepare folder
run: cp -rL result dist && chmod -R 777 dist
- name: Move binary to extension
if: ${{ matrix.goos != 'windows' }}
run: mv server/config-lsp dist/out/
- name: Move binary to extension
if: ${{ matrix.goos == 'windows' }}
run: mv server/config-lsp.exe dist/out/
- name: Shrink binary
if: ${{ matrix.goos == 'linux' }}

View File

@ -19,7 +19,7 @@ builds:
dir: ./server
archives:
- format: tar.gz
- formats: [ 'tar.gz' ]
# this name template makes the OS and Arch compatible with the results of `uname`.
name_template: >-
{{ .ProjectName }}_
@ -31,7 +31,7 @@ archives:
# use zip for windows archives
format_overrides:
- goos: windows
format: zip
formats: [ 'zip' ]
changelog:
sort: asc

flake.lock (generated, 12 lines changed)
View File

@ -26,11 +26,11 @@
]
},
"locked": {
"lastModified": 1733668782,
"narHash": "sha256-tPsqU00FhgdFr0JiQUiBMgPVbl1jbPCY5gbFiJycL3I=",
"lastModified": 1742209644,
"narHash": "sha256-jMy1XqXqD0/tJprEbUmKilTkvbDY/C0ZGSsJJH4TNCE=",
"owner": "tweag",
"repo": "gomod2nix",
"rev": "514283ec89c39ad0079ff2f3b1437404e4cba608",
"rev": "8f3534eb8f6c5c3fce799376dc3b91bae6b11884",
"type": "github"
},
"original": {
@ -41,11 +41,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1739214665,
"narHash": "sha256-26L8VAu3/1YRxS8MHgBOyOM8xALdo6N0I04PgorE7UM=",
"lastModified": 1742669843,
"narHash": "sha256-G5n+FOXLXcRx+3hCJ6Rt6ZQyF1zqQ0DL0sWAMn2Nk0w=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "64e75cd44acf21c7933d61d7721e812eac1b5a0a",
"rev": "1e5b653dff12029333a6546c11e108ede13052eb",
"type": "github"
},
"original": {

View File

@ -3,12 +3,12 @@
inputs = {
nixpkgs.url = "github:nixos/nixpkgs?ref=nixos-unstable";
utils.url = "github:numtide/flake-utils";
gomod2nix = {
url = "github:tweag/gomod2nix";
inputs.nixpkgs.follows = "nixpkgs";
inputs.utils.follows = "utils";
};
utils.url = "github:numtide/flake-utils";
};
outputs = { self, nixpkgs, utils, gomod2nix }:
@ -23,26 +23,27 @@
"aarch64-windows"
] (system:
let
version = "0.1.3"; # CI:CD-VERSION
version = "0.2.2"; # CI:CD-VERSION
pkgs = import nixpkgs {
inherit system;
overlays = [
(final: prev: {
go = prev.go_1_22;
buildGoModule = prev.buildGo122Module;
go = prev.go_1_24;
buildGoModule = prev.buildGo124Module;
})
gomod2nix.overlays.default
];
};
inputs = [
pkgs.go_1_22
pkgs.go_1_24
];
serverUncompressed = pkgs.buildGoModule {
nativeBuildInputs = inputs;
pname = "github.com/Myzel394/config-lsp";
version = version;
src = ./server;
vendorHash = "sha256-eO1eY+2XuOCd/dKwgFtu05+bnn/Cv8ZbUIwRjCwJF+U=";
vendorHash = "sha256-0/oMmrdQGnx7opL4SYaYU2FdroKkF60FtRTvZ1dYr/Y";
proxyVendor = true;
ldflags = [ "-s" "-w" ];
checkPhase = ''
go test -v $(pwd)/...
@ -68,6 +69,7 @@
in {
packages = {
default = server;
"server-uncompressed" = serverUncompressed;
"vs-code-extension-bare" = let
name = "config-lsp";
node-modules = pkgs.mkYarnPackage {
@ -131,20 +133,27 @@
};
in node-modules;
};
devShells.default = pkgs.mkShell {
buildInputs = inputs ++ (with pkgs; [
mailutils
wireguard-tools
antlr
]) ++ (if pkgs.stdenv.isLinux then with pkgs; [
postfix
] else []);
};
devShells.default = let
ourGopls = pkgs.gopls;
in
pkgs.mkShell {
buildInputs = inputs ++ (with pkgs; [
mailutils
wireguard-tools
antlr
just
ourGopls
]) ++ (if pkgs.stdenv.isLinux then with pkgs; [
postfix
] else []);
};
devShells."vs-code-extension" = pkgs.mkShell {
buildInputs = [
pkgs.nodejs
pkgs.vsce
pkgs.yarn2nix
buildInputs = with pkgs; [
nodejs
vsce
yarn2nix
];
};
}

justfile (new file, 50 lines)
View File

@ -0,0 +1,50 @@
#!/usr/bin/env just --justfile
set dotenv-load := true
default:
@just --list
# Lint whole project
lint:
cd server && gofmt -s -w .
# cd vs-code-extension && yarn run lint
# Build config-lsp and test it in nvim (config-lsp will be loaded automatically)
[working-directory: "./server"]
test-nvim file:
go build -o ./result/bin/config-lsp && rm -rf ~/.local/state/nvim/lsp.log && DOTFILES_IGNORE_CONFIG_LSP=1 nvim {{file}} -c ':source nvim-lsp-debug.lua'
# Show Mason Logs
show-nvim-logs:
bat ~/.local/state/nvim/lsp.log
[working-directory: "./server"]
test:
nix develop --command bash -c 'go test ./... -count=1'
[working-directory: "./server"]
update-antlr-parsers:
# aliases
cd handlers/aliases && antlr4 -Dlanguage=Go -o ast/parser Aliases.g4
# fstab
cd handlers/fstab && antlr4 -Dlanguage=Go -o ast/parser Fstab.g4
# sshd_config
cd handlers/sshd_config && antlr4 -Dlanguage=Go -o ast/parser Config.g4
cd handlers/sshd_config/match-parser && antlr4 -Dlanguage=Go -o parser Match.g4
# ssh_config
cd handlers/ssh_config && antlr4 -Dlanguage=Go -o ast/parser Config.g4
cd handlers/ssh_config/match-parser && antlr4 -Dlanguage=Go -o parser Match.g4
# hosts
cd handlers/hosts && antlr4 -Dlanguage=Go -o ast/parser Hosts.g4
# Ready for a PR? Run this recipe before opening the PR!
ready:
just lint
just test

View File

@ -0,0 +1,75 @@
package commondocumentation
import (
docvalues "config-lsp/doc-values"
)
var APFSDocumentationAssignable = map[docvalues.EnumString]docvalues.DeprecatedValue{
docvalues.CreateEnumStringWithDoc(
"user",
"Set the owner of the files in the file system to user. The default owner is the owner of the directory on which the file system is being mounted. The user may be a user-name, or a numeric value.",
): docvalues.UIDValue{},
docvalues.CreateEnumStringWithDoc(
"group",
"Set the group of the files in the file system to group. The default group is the group of the directory on which the file system is being mounted. The group may be a group-name, or a numeric value.",
): docvalues.GIDValue{},
docvalues.CreateEnumStringWithDoc(
"snapshot",
"The name of the snapshot to mount. In this usage pathname is the mounted root directory of the base volume containing the snapshot.",
): docvalues.StringValue{},
}
var APFSDocumentationEnums = []docvalues.EnumString{
docvalues.CreateEnumStringWithDoc(
"async",
"All I/O to the file system should be done asynchronously. This can be somewhat dangerous with respect to losing data when faced with system crashes and power outages. This is also the default. It can be avoided with the noasync option.",
),
docvalues.CreateEnumStringWithDoc(
"noauto",
"This filesystem should be skipped when mount is run with the -a flag.",
),
docvalues.CreateEnumStringWithDoc(
"nodev",
"Do not interpret character or block special devices on the file system. This option is useful for a server that has file systems containing special devices for architectures other than its own.",
),
docvalues.CreateEnumStringWithDoc(
"noexec",
"Do not allow execution of any binaries on the mounted file system. This option is useful for a server that has file systems containing binaries for architectures other than its own.",
),
docvalues.CreateEnumStringWithDoc(
"noowners",
"Ignore the ownership field for the entire volume. This causes all objects to appear as owned by user ID 99 and group ID 99. User ID 99 is interpreted as the current effective user ID, while group ID 99 is used directly and translates to ``unknown''.",
),
docvalues.CreateEnumStringWithDoc(
"nosuid",
"Do not allow set-user-identifier or set-group-identifier bits to take effect.",
),
docvalues.CreateEnumStringWithDoc(
"rdonly",
"The same as -r; mount the file system read-only (even the super-user may not write it).",
),
docvalues.CreateEnumStringWithDoc(
"update",
"The same as -u; indicate that the status of an already mounted file system should be changed.",
),
docvalues.CreateEnumStringWithDoc(
"union",
"Causes the namespace to appear as the union of directories of the mounted filesystem with corresponding directories in the underlying filesystem. Lookups will be done in the mounted filesystem first. If those operations fail due to a non-existent file the underlying directory is then accessed.",
),
docvalues.CreateEnumStringWithDoc(
"noatime",
"Do not update the file access time when reading from a file. This option is useful on file systems where there are large numbers of files and performance is more critical than updating the file access time (which is rarely ever important).",
),
docvalues.CreateEnumStringWithDoc(
"strictatime",
"Always update the file access time when reading from a file. Without this option the filesystem may default to a less strict update mode, where some access time updates are skipped for performance reasons. This option could be ignored if it is not supported by the filesystem.",
),
docvalues.CreateEnumStringWithDoc(
"nobrowse",
"This option indicates that the mount point should not be visible via the GUI (i.e., appear on the Desktop as a separate volume).",
),
docvalues.CreateEnumStringWithDoc(
"nofollow",
"This option indicates that in the course of the mount system call, the kernel should not follow any symlinks that may be present in the provided mount-on directory. This is the same as the -k option.",
),
}

View File

@ -0,0 +1,195 @@
package commondocumentation
import docvalues "config-lsp/doc-values"
var checksumType = docvalues.EnumValue{
EnforceValues: true,
Values: []docvalues.EnumString{
docvalues.CreateEnumString("none"),
docvalues.CreateEnumString("crc32c"),
docvalues.CreateEnumString("crc64"),
},
}
var compressionType = docvalues.EnumValue{
EnforceValues: true,
Values: []docvalues.EnumString{
docvalues.CreateEnumStringWithDoc("none", "(default)"),
docvalues.CreateEnumString("lz4"),
docvalues.CreateEnumString("gzip"),
docvalues.CreateEnumString("zstd"),
},
}
// No idea if those enums are correct,
// the documentation does not provide any information
var booleanEnumValue = docvalues.EnumValue{
EnforceValues: true,
Values: []docvalues.EnumString{
docvalues.CreateEnumString("yes"),
docvalues.CreateEnumString("no"),
},
}
var BcacheFSDocumentationAssignable = map[docvalues.EnumString]docvalues.DeprecatedValue{
docvalues.CreateEnumStringWithDoc(
"errors",
"Action to take on filesystem error. The errors option is used for inconsistencies that indicate some sort of a bug",
): docvalues.EnumValue{
EnforceValues: true,
Values: []docvalues.EnumString{
docvalues.CreateEnumStringWithDoc("continue", "Log the error but continue normal operation"),
docvalues.CreateEnumStringWithDoc("ro", "Emergency read only, immediately halting any changes to the filesystem on disk"),
docvalues.CreateEnumStringWithDoc("panic", "Immediately halt the entire machine, printing a backtrace on the system console"),
},
},
docvalues.CreateEnumStringWithDoc(
"metadata_replicas",
"Number of replicas for metadata (journal and btree)",
): docvalues.PositiveNumberValue(),
docvalues.CreateEnumStringWithDoc(
"data_replicas",
"Number of replicas for user data",
): docvalues.PositiveNumberValue(),
docvalues.CreateEnumStringWithDoc(
"metadata_checksum",
"Checksum type for metadata writes",
): checksumType,
docvalues.CreateEnumStringWithDoc(
"data_checksum",
"Checksum type for data writes",
): checksumType,
docvalues.CreateEnumStringWithDoc(
"compression",
"Compression type",
): compressionType,
docvalues.CreateEnumStringWithDoc(
"background_compression",
"Background compression type",
): compressionType,
docvalues.CreateEnumStringWithDoc(
"str_hash",
"Hash function for string hash tables (directories and xattrs)",
): docvalues.EnumValue{
EnforceValues: true,
Values: []docvalues.EnumString{
docvalues.CreateEnumString("crc32c"),
docvalues.CreateEnumString("crc64"),
docvalues.CreateEnumString("siphash"),
},
},
docvalues.CreateEnumStringWithDoc(
"metadata_target",
"Preferred target for metadata writes",
): docvalues.StringValue{},
docvalues.CreateEnumStringWithDoc(
"foreground_target",
"Preferred target for foreground writes",
): docvalues.StringValue{},
docvalues.CreateEnumStringWithDoc(
"background_target",
"Target for data to be moved to in the background",
): docvalues.StringValue{},
docvalues.CreateEnumStringWithDoc(
"promote_target",
"Target for data to be copied to on read",
): docvalues.StringValue{},
docvalues.CreateEnumStringWithDoc(
"erasure_code",
"Enable erasure coding",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"inodes_32bit",
"Restrict new inode numbers to 32 bits",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"shard_inode_numbers",
"Use CPU id for high bits of new inode numbers.",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"wide_macs",
"Store full 128 bit cryptographic MACs (default 80)",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"inline_data",
"Enable inline data extents (default on)",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"journal_flush_delay",
"Delay in milliseconds before automatic journal commit (default 1000)",
): docvalues.PositiveNumberValue(),
docvalues.CreateEnumStringWithDoc(
"journal_flush_disabled",
"Disables journal flush on sync/fsync. `journal_flush_delay` remains in effect, thus with the default setting not more than 1 second of work will be lost",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"journal_reclaim",
"Reclaim journal space after a certain amount of time",
): docvalues.PositiveNumberValue(),
docvalues.CreateEnumStringWithDoc(
"journal_reclaim_delay",
"Delay in milliseconds before automatic journal reclaim",
): docvalues.PositiveNumberValue(),
docvalues.CreateEnumStringWithDoc(
"acl",
"Enable POSIX ACLs",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"usrquota",
"Enable user quotas",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"grpquota",
"Enable group quotas",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"prjquota",
"Enable project quotas",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"degraded",
"Allow mounting with data degraded",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"very_degraded",
"Allow mounting with data missing",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"verbose",
"Extra debugging info during mount/recovery",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"fsck",
"Run fsck during mount",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"fix_errors",
"Fix errors without asking during fsck",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"ratelimit_errors",
"Ratelimit error messages during fsck",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"read_only",
"Mount in read only mode",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"nochanges",
"Issue no writes, even for journal replay",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"norecovery",
"Dont replay the journal (not recommended)",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"noexcl",
"Dont open devices in exclusive mode",
): booleanEnumValue,
docvalues.CreateEnumStringWithDoc(
"version_upgrade",
"Upgrade on disk format to latest version",
): booleanEnumValue,
}
var BcacheFSDocumentationEnums = []docvalues.EnumString{}

View File

@ -6,7 +6,10 @@ import (
)
func ClearDiagnostics(context *glsp.Context, uri protocol.DocumentUri) {
go context.Notify(
// Diagnostics are sent synchronously, as sending them async
// could result in a race condition when we send diagnostics
// to the client.
context.Notify(
protocol.ServerTextDocumentPublishDiagnostics,
protocol.PublishDiagnosticsParams{
URI: uri,

View File

@ -0,0 +1,43 @@
package common
import (
"github.com/hbollon/go-edlib"
)
// Find items that are similar to the given input.
// This is used to find typos & suggest the correct item.
// Once an item is found that has a Damerau-Levenshtein distance of 1, it is immediately returned.
// If not, then the next 2 items of similarity 2, or 3 items of similarity 3 are returned.
// If no items with similarity <= 3 are found, then an empty slice is returned.
func FindSimilarItems[T ~string](
input T,
items []T,
) []T {
itemsPerSimilarity := map[uint8][]T{
2: make([]T, 0, 2),
3: make([]T, 0, 3),
}
for _, item := range items {
similarity := edlib.DamerauLevenshteinDistance(string(item), string(input))
switch similarity {
case 1:
return []T{item}
case 2:
itemsPerSimilarity[2] = append(itemsPerSimilarity[2], item)
if len(itemsPerSimilarity[2]) >= 2 {
return itemsPerSimilarity[2]
}
case 3:
itemsPerSimilarity[3] = append(itemsPerSimilarity[3], item)
if len(itemsPerSimilarity[3]) >= 3 {
return itemsPerSimilarity[3]
}
}
}
return append(itemsPerSimilarity[2], itemsPerSimilarity[3]...)
}
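
For reference, a minimal usage sketch of the helper above (the keyword list is made up; it assumes the snippet is compiled inside the config-lsp module so that config-lsp/common resolves):

package main

import (
	"fmt"

	"config-lsp/common"
)

func main() {
	// Hypothetical option names; any []T with T ~string works.
	keywords := []string{"PermitRootLogin", "PasswordAuthentication", "Port"}

	// "Prot" is a transposition of "Port" (Damerau-Levenshtein distance 1),
	// so it is returned immediately as the single suggestion.
	fmt.Println(common.FindSimilarItems("Prot", keywords)) // [Port]
}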

View File

@ -13,13 +13,30 @@ type ServerOptionsType struct {
// we show a native warning. The error message boxes just clutter
// the interface.
NoUndetectableErrors bool
// If true, the server will not detect typos and suggest
// the correct keywords.
// Since the server finds typos using the Damerau-Levenshtein distance,
// and this is done each time code actions are requested
// (which happens quite often), these suggestions can eat a lot of resources.
// You may want to enable this option if you are dealing with little
// resources or if you're low on battery.
NoTypoSuggestions bool
}
var ServerOptions = new(ServerOptionsType)
func InitServerOptions() {
ServerOptions.NoUndetectableErrors = false
ServerOptions.NoTypoSuggestions = false
if slices.Contains(os.Args, "--no-undetectable-errors") {
Log.Info("config-lsp will not return errors for undetectable files")
ServerOptions.NoUndetectableErrors = true
}
if slices.Contains(os.Args, "--no-typo-suggestions") {
Log.Info("config-lsp will not detect typos for keywords")
ServerOptions.NoTypoSuggestions = true
}
}
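
To make the effect of the two switches concrete, here is a standalone sketch of the same check (not the real config-lsp entry point; the server simply scans os.Args, exactly as above):

package main

import (
	"fmt"
	"os"
	"slices"
)

func main() {
	// Simulate starting the server as: config-lsp --no-typo-suggestions
	os.Args = append(os.Args, "--no-typo-suggestions")

	noTypoSuggestions := slices.Contains(os.Args, "--no-typo-suggestions")
	noUndetectableErrors := slices.Contains(os.Args, "--no-undetectable-errors")

	fmt.Println(noTypoSuggestions)    // true: typo suggestions are disabled
	fmt.Println(noUndetectableErrors) // false: undetectable-file errors stay enabled
}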

View File

@ -177,7 +177,7 @@ func (v KeyEnumAssignmentValue) DeprecatedFetchCompletions(line string, cursor u
)
if found {
relativePosition := max(1, foundPosition) - 1
relativePosition := min(foundPosition, len(line)-1)
selectedKey := line[:uint32(relativePosition)]
line = line[uint32(relativePosition+len(v.Separator)):]
cursor -= uint32(relativePosition)

View File

@ -2,31 +2,21 @@ package docvalues
import (
"config-lsp/utils"
protocol "github.com/tliron/glsp/protocol_3_16"
"errors"
"strings"
protocol "github.com/tliron/glsp/protocol_3_16"
)
type PathDoesNotExistError struct{}
func (e PathDoesNotExistError) Error() string {
return "This path does not exist"
}
type PathInvalidError struct{}
func (e PathInvalidError) Error() string {
return "This path is invalid"
}
type PathType uint8
const (
PathTypeExistenceOptional PathType = 0
PathTypeFile PathType = 1
PathTypeDirectory PathType = 2
PathTypeFile PathType = 1
PathTypeDirectory PathType = 2
)
type PathValue struct {
IsOptional bool
RequiredType PathType
}
@ -34,47 +24,88 @@ func (v PathValue) GetTypeDescription() []string {
hints := make([]string, 0)
switch v.RequiredType {
case PathTypeExistenceOptional:
hints = append(hints, "Optional")
break
case PathTypeFile:
hints = append(hints, "File")
case PathTypeDirectory:
hints = append(hints, "Directory")
}
if v.IsOptional {
hints = append(hints, "Optional")
}
return []string{strings.Join(hints, ", ")}
}
func (v PathValue) DeprecatedCheckIsValid(value string) []*InvalidValue {
if !utils.DoesPathExist(value) {
return []*InvalidValue{{
Err: PathDoesNotExistError{},
Start: 0,
End: uint32(len(value)),
}}
if v.IsOptional {
return nil
} else {
return []*InvalidValue{{
Err: errors.New("This path does not exist"),
Start: 0,
End: uint32(len(value)),
}}
}
}
isValid := false
fileExpected := (v.RequiredType & PathTypeFile) == PathTypeFile
directoryExpected := (v.RequiredType & PathTypeDirectory) == PathTypeDirectory
if (v.RequiredType & PathTypeFile) == PathTypeFile {
isValid := true
// If file is expected
if fileExpected {
// and exists
isValid = isValid && utils.IsPathFile(value)
// file not expected
} else {
// and should not exist
isValid = isValid && !utils.IsPathFile(value)
}
if (v.RequiredType & PathTypeDirectory) == PathTypeDirectory {
// if directory
if directoryExpected {
// and exists
isValid = isValid && utils.IsPathDirectory(value)
// directory not expected
} else {
// and should not exist
isValid = isValid && !utils.IsPathDirectory(value)
}
if isValid {
return nil
}
if fileExpected && directoryExpected {
return []*InvalidValue{{
Err: errors.New("This must be either a file or a directory"),
Start: 0,
End: uint32(len(value)),
}}
}
if fileExpected {
return []*InvalidValue{{
Err: errors.New("This must be a file"),
Start: 0,
End: uint32(len(value)),
}}
}
if directoryExpected {
return []*InvalidValue{{
Err: errors.New("This must be a directory"),
Start: 0,
End: uint32(len(value)),
}}
}
return []*InvalidValue{{
Err: PathInvalidError{},
Err: errors.New("This path is invalid"),
Start: 0,
End: uint32(len(value)),
},
}
}}
}
func (v PathValue) DeprecatedFetchCompletions(line string, cursor uint32) []protocol.CompletionItem {

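A small usage sketch for the reworked PathValue (the paths are made up; it assumes the snippet is built inside the config-lsp module):

package main

import (
	"fmt"

	docvalues "config-lsp/doc-values"
)

func main() {
	// A field that must point to an existing file (e.g. an include file).
	includeFile := docvalues.PathValue{
		IsOptional:   false,
		RequiredType: docvalues.PathTypeFile,
	}
	// false: the path does not exist and the field is not optional.
	fmt.Println(includeFile.DeprecatedCheckIsValid("/tmp/does-not-exist") == nil)

	// RequiredType is a bit mask, so "file or directory" can be expressed too.
	fileOrDir := docvalues.PathValue{
		IsOptional:   true,
		RequiredType: docvalues.PathTypeFile | docvalues.PathTypeDirectory,
	}
	// true: the value is optional, so a missing path is accepted.
	fmt.Println(fileOrDir.DeprecatedCheckIsValid("/tmp/also-missing") == nil)
}
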
server/fetch_tags.js (new file, 27 lines)
View File

@ -0,0 +1,27 @@
// Creates a JSON object in the form of:
// {
// [<Option name>]: documentation
// }
//
// Searches for <dl> elements with <dt> and <dd> children based
// on the currently selected element in the Elements tab of the browser's developer tools.
(() => {
const content = {}
let currentOption = ""
const $elements = $0.querySelectorAll(":scope > dt,dd")
for (const $element of $elements) {
switch ($element.tagName) {
case "DT":
currentOption = $element.textContent.trim()
break
case "DD":
content[currentOption] = $element.textContent.trim()
break
}
}
console.log(content)
})()

View File

@ -1,35 +1,32 @@
module config-lsp
go 1.22.5
go 1.24
require (
github.com/antlr4-go/antlr/v4 v4.13.1
github.com/emirpasic/gods v1.18.1
github.com/google/go-cmp v0.6.0
github.com/k0kubun/pp v3.0.1+incompatible
github.com/tliron/commonlog v0.2.17
github.com/google/go-cmp v0.7.0
github.com/hbollon/go-edlib v1.6.0
github.com/tliron/commonlog v0.2.19
github.com/tliron/glsp v0.2.2
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
)
require (
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/gorilla/websocket v1.5.3 // indirect
github.com/iancoleman/strcase v0.3.0 // indirect
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/muesli/termenv v0.15.2 // indirect
github.com/petermattis/goid v0.0.0-20240716203034-badd1c0974d6 // indirect
github.com/muesli/termenv v0.16.0 // indirect
github.com/petermattis/goid v0.0.0-20250508124226-395b08cebbdb // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/rivo/uniseg v0.4.7 // indirect
github.com/sasha-s/go-deadlock v0.3.1 // indirect
github.com/sasha-s/go-deadlock v0.3.5 // indirect
github.com/segmentio/ksuid v1.0.4 // indirect
github.com/sourcegraph/jsonrpc2 v0.2.0 // indirect
github.com/tliron/kutil v0.3.24 // indirect
golang.org/x/crypto v0.25.0 // indirect
golang.org/x/sys v0.22.0 // indirect
golang.org/x/term v0.22.0 // indirect
github.com/sourcegraph/jsonrpc2 v0.2.1 // indirect
github.com/tliron/kutil v0.3.26 // indirect
golang.org/x/crypto v0.38.0 // indirect
golang.org/x/sys v0.33.0 // indirect
golang.org/x/term v0.32.0 // indirect
)

View File

@ -4,55 +4,46 @@ github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiE
github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hbollon/go-edlib v1.6.0 h1:ga7AwwVIvP8mHm9GsPueC0d71cfRU/52hmPJ7Tprv4E=
github.com/hbollon/go-edlib v1.6.0/go.mod h1:wnt6o6EIVEzUfgbUZY7BerzQ2uvzp354qmS2xaLkrhM=
github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=
github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88 h1:uC1QfSlInpQF+M0ao65imhwqKnz3Q2z/d8PWZRMQvDM=
github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
github.com/k0kubun/pp v3.0.1+incompatible h1:3tqvf7QgUnZ5tXO6pNAZlrvHgl6DvifjDrd9g2S9Z40=
github.com/k0kubun/pp v3.0.1+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o=
github.com/petermattis/goid v0.0.0-20240716203034-badd1c0974d6 h1:DUDJI8T/9NcGbbL+AWk6vIYlmQ8ZBS8LZqVre6zbkPQ=
github.com/petermattis/goid v0.0.0-20240716203034-badd1c0974d6/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
github.com/petermattis/goid v0.0.0-20250508124226-395b08cebbdb h1:3PrKuO92dUTMrQ9dx0YNejC6U/Si6jqKmyQ9vWjwqR4=
github.com/petermattis/goid v0.0.0-20250508124226-395b08cebbdb/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/sasha-s/go-deadlock v0.3.1 h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0=
github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM=
github.com/sasha-s/go-deadlock v0.3.5 h1:tNCOEEDG6tBqrNDOX35j/7hL5FcFViG6awUGROb2NsU=
github.com/sasha-s/go-deadlock v0.3.5/go.mod h1:bugP6EGbdGYObIlx7pUZtWqlvo8k9H6vCBBsiChJQ5U=
github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c=
github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE=
github.com/sourcegraph/jsonrpc2 v0.2.0 h1:KjN/dC4fP6aN9030MZCJs9WQbTOjWHhrtKVpzzSrr/U=
github.com/sourcegraph/jsonrpc2 v0.2.0/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
github.com/tliron/commonlog v0.2.17 h1:GFVvzDZbNLkuQfT45IZeWkrR5AyqiX7Du8pWAtFuPTY=
github.com/tliron/commonlog v0.2.17/go.mod h1:J2Hb63/mMjYmkDzd7E+VL9wCHT6NFNSzV/IOjJWMJqc=
github.com/sourcegraph/jsonrpc2 v0.2.1 h1:2GtljixMQYUYCmIg7W9aF2dFmniq/mOr2T9tFRh6zSQ=
github.com/sourcegraph/jsonrpc2 v0.2.1/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
github.com/tliron/commonlog v0.2.19 h1:v1mOH1TyzFLqkshR03khw7ENAZPjAyZTQBQrqN+vX9c=
github.com/tliron/commonlog v0.2.19/go.mod h1:AcdhfcUqlAWukDrzTGyaPhUgYiNdZhS4dKzD/e0tjcY=
github.com/tliron/glsp v0.2.2 h1:IKPfwpE8Lu8yB6Dayta+IyRMAbTVunudeauEgjXBt+c=
github.com/tliron/glsp v0.2.2/go.mod h1:GMVWDNeODxHzmDPvYbYTCs7yHVaEATfYtXiYJ9w1nBg=
github.com/tliron/kutil v0.3.24 h1:LvaqizF4htpEef9tC0B//sqtvQzEjDu69A4a1HrY+ko=
github.com/tliron/kutil v0.3.24/go.mod h1:2iSIhOnOe1reqczZQy6TauVHhItsq6xRLV2rVBvodpk=
golang.org/x/crypto v0.25.0 h1:ypSNr+bnYL2YhwoMt2zPxHFmbAN1KZs/njMG3hxUp30=
golang.org/x/crypto v0.25.0/go.mod h1:T+wALwcMOSE0kXgUAnPAHqTLW+XHgcELELW8VaDgm/M=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
github.com/tliron/kutil v0.3.26 h1:G+dicQLvzm3zdOMrrQFLBfHJXtk57fEu2kf1IFNyJxw=
github.com/tliron/kutil v0.3.26/go.mod h1:1/HRVAb+fnRIRnzmhu0FPP+ZJKobrpwHStDVMuaXDzY=
golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI=
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.22.0 h1:BbsgPEJULsl2fV/AT3v15Mjva5yXKQDyKf+TbDz7QJk=
golang.org/x/term v0.22.0/go.mod h1:F3qCibpT5AMpCRfhfT53vVJwhLtIVHhB9XDjfFvnMI4=
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=

View File

@ -68,7 +68,7 @@ func (s *aliasesParserListener) EnterValues(ctx *parser.ValuesContext) {
}
}
// === Value === //
// === Name === //
func (s *aliasesParserListener) EnterUser(ctx *parser.UserContext) {
location := common.CharacterRangeFromCtx(ctx.BaseParserRuleContext)

View File

@ -1,4 +1,4 @@
// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Aliases

View File

@ -1,4 +1,4 @@
// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser

View File

@ -1,4 +1,4 @@
// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Aliases

View File

@ -1,4 +1,4 @@
// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Aliases

View File

@ -15,6 +15,7 @@ var UserDeclaration = "`user`"
var PathField = docvalues.DocumentationValue{
Documentation: "Append messages to file, specified by its absolute pathname",
Value: docvalues.PathValue{
IsOptional: true,
RequiredType: docvalues.PathTypeFile,
},
}
@ -40,6 +41,7 @@ var EmailDeclaration = "`user-part@domain-part`"
var IncludeField = docvalues.DocumentationValue{
Documentation: "Include any definitions in file as alias entries. The format of the file is identical to this one.",
Value: docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
}

View File

@ -51,7 +51,7 @@ func TextDocumentHover(
contents := []string{}
contents = append(contents, handlers.GetAliasValueTypeInfo(value)...)
contents = append(contents, "")
contents = append(contents, "#### Value")
contents = append(contents, "#### Name")
contents = append(contents, handlers.GetAliasValueHoverInfo(*document.Indexes, value))
text := strings.Join(contents, "\n")

View File

@ -0,0 +1,38 @@
package analyzer
import (
"config-lsp/common"
"config-lsp/handlers/fstab/ast"
"regexp"
protocol "github.com/tliron/glsp/protocol_3_16"
)
var volatileBlockFields = regexp.MustCompile(`^/dev/(sd|nvme|mmcblk|sr|vd|loop|cdrom)[a-zA-Z0-9]*$`)
func analyzeSpecField(
ctx *analyzerContext,
field *ast.FstabField,
) {
if field == nil {
return
}
if field.Value.Value == "" {
return
}
if !volatileBlockFields.MatchString(field.Value.Value) {
return
}
codeDescription := protocol.CodeDescription{
HRef: protocol.URI("https://wiki.archlinux.org/title/Persistent_block_device_naming"),
}
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: field.ToLSPRange(),
Message: "Kernel name descriptors for block devices are not persistent and can change each boot, they should not be used in configuration files. Prefer device UUIDs or LABELs instead.",
CodeDescription: &codeDescription,
Severity: &common.SeverityWarning,
})
}
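
To illustrate which spec values the analyzer above flags, a quick standalone sketch using the same pattern (the sample entries are illustrative):

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as in analyzeSpecField: kernel name descriptors whose
// numbering is not stable across boots.
var volatileBlockFields = regexp.MustCompile(`^/dev/(sd|nvme|mmcblk|sr|vd|loop|cdrom)[a-zA-Z0-9]*$`)

func main() {
	for _, spec := range []string{
		"/dev/sda1",      // flagged: kernel name, may change each boot
		"/dev/nvme0n1p2", // flagged
		"UUID=0a3407de-014b-458b-b5c1-848e92a327a3", // not flagged: persistent identifier
		"LABEL=root", // not flagged
	} {
		fmt.Println(spec, volatileBlockFields.MatchString(spec))
	}
}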

View File

@ -21,6 +21,8 @@ func analyzeValuesAreValid(
checkField(ctx, entry.Fields.MountPoint, fields.MountPointField)
checkField(ctx, entry.Fields.FilesystemType, fields.FileSystemTypeField)
analyzeSpecField(ctx, entry.Fields.Spec)
if entry.Fields.Options != nil {
mountOptions := entry.FetchMountOptionsField(true)

View File

@ -141,8 +141,8 @@ func (e FstabEntry) FetchMountOptionsField(includeDefaults bool) docvalues.Depre
return nil
}
var enums []docvalues.EnumString
var assignable map[docvalues.EnumString]docvalues.DeprecatedValue
var enums []docvalues.EnumString = make([]docvalues.EnumString, 0)
var assignable map[docvalues.EnumString]docvalues.DeprecatedValue = make(map[docvalues.EnumString]docvalues.DeprecatedValue, 0)
if includeDefaults {
enums = append(option.Enums, fields.DefaultOptions...)

View File

@ -5,10 +5,6 @@ null
'#'
null
null
null
null
null
null
token symbolic names:
null
@ -17,10 +13,6 @@ WHITESPACE
HASH
STRING
QUOTED_STRING
ADFS
AFFS
BTRFS
EXFAT
rule names:
entry
@ -33,4 +25,4 @@ pass
atn:
[4, 1, 9, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1, 0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3, 0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40, 8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0, 1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12, 0, 2, 1, 0, 4, 5, 1, 0, 4, 9, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0, 4, 57, 1, 0, 0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0, 0, 0, 12, 65, 1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16, 1, 0, 0, 0, 16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0, 18, 19, 1, 0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24, 23, 1, 0, 0, 0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0, 0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31, 3, 6, 3, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0, 32, 34, 5, 2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1, 0, 0, 0, 35, 37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 39, 1, 0, 0, 0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0, 0, 40, 42, 1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51, 50, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0, 1, 54, 1, 1, 0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7, 0, 0, 0, 58, 5, 1, 0, 0, 0, 59, 60, 7, 1, 0, 0, 60, 7, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0, 0, 65, 66, 5, 1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51]
[4, 1, 5, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1, 0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3, 0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40, 8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0, 1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12, 0, 1, 1, 0, 4, 5, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0, 4, 57, 1, 0, 0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0, 0, 0, 12, 65, 1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16, 1, 0, 0, 0, 16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0, 18, 19, 1, 0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24, 23, 1, 0, 0, 0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0, 0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31, 3, 6, 3, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0, 32, 34, 5, 2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1, 0, 0, 0, 35, 37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 39, 1, 0, 0, 0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0, 0, 40, 42, 1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51, 50, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0, 1, 54, 1, 1, 0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7, 0, 0, 0, 58, 5, 1, 0, 0, 0, 59, 60, 7, 0, 0, 0, 60, 7, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0, 0, 65, 66, 5, 1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51]

View File

@ -3,8 +3,4 @@ WHITESPACE=2
HASH=3
STRING=4
QUOTED_STRING=5
ADFS=6
AFFS=7
BTRFS=8
EXFAT=9
'#'=3

View File

@ -5,10 +5,6 @@ null
'#'
null
null
null
null
null
null
token symbolic names:
null
@ -17,10 +13,6 @@ WHITESPACE
HASH
STRING
QUOTED_STRING
ADFS
AFFS
BTRFS
EXFAT
rule names:
DIGITS
@ -28,10 +20,6 @@ WHITESPACE
HASH
STRING
QUOTED_STRING
ADFS
AFFS
BTRFS
EXFAT
channel names:
DEFAULT_TOKEN_CHANNEL
@ -41,4 +29,4 @@ mode names:
DEFAULT_MODE
atn:
[4, 0, 9, 76, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 1, 0, 4, 0, 21, 8, 0, 11, 0, 12, 0, 22, 1, 1, 4, 1, 26, 8, 1, 11, 1, 12, 1, 27, 1, 2, 1, 2, 1, 3, 4, 3, 33, 8, 3, 11, 3, 12, 3, 34, 1, 4, 1, 4, 3, 4, 39, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 44, 8, 4, 10, 4, 12, 4, 47, 9, 4, 1, 4, 3, 4, 50, 8, 4, 1, 4, 3, 4, 53, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 0, 0, 9, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 1, 0, 12, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9, 9, 32, 32, 35, 35, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 70, 70, 102, 102, 2, 0, 83, 83, 115, 115, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 69, 69, 101, 101, 2, 0, 88, 88, 120, 120, 82, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 1, 20, 1, 0, 0, 0, 3, 25, 1, 0, 0, 0, 5, 29, 1, 0, 0, 0, 7, 32, 1, 0, 0, 0, 9, 36, 1, 0, 0, 0, 11, 54, 1, 0, 0, 0, 13, 59, 1, 0, 0, 0, 15, 64, 1, 0, 0, 0, 17, 70, 1, 0, 0, 0, 19, 21, 7, 0, 0, 0, 20, 19, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 20, 1, 0, 0, 0, 22, 23, 1, 0, 0, 0, 23, 2, 1, 0, 0, 0, 24, 26, 7, 1, 0, 0, 25, 24, 1, 0, 0, 0, 26, 27, 1, 0, 0, 0, 27, 25, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 4, 1, 0, 0, 0, 29, 30, 5, 35, 0, 0, 30, 6, 1, 0, 0, 0, 31, 33, 8, 2, 0, 0, 32, 31, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 32, 1, 0, 0, 0, 34, 35, 1, 0, 0, 0, 35, 8, 1, 0, 0, 0, 36, 38, 5, 34, 0, 0, 37, 39, 3, 3, 1, 0, 38, 37, 1, 0, 0, 0, 38, 39, 1, 0, 0, 0, 39, 45, 1, 0, 0, 0, 40, 41, 3, 7, 3, 0, 41, 42, 3, 3, 1, 0, 42, 44, 1, 0, 0, 0, 43, 40, 1, 0, 0, 0, 44, 47, 1, 0, 0, 0, 45, 43, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45, 1, 0, 0, 0, 48, 50, 3, 7, 3, 0, 49, 48, 1, 0, 0, 0, 49, 50, 1, 0, 0, 0, 50, 52, 1, 0, 0, 0, 51, 53, 5, 34, 0, 0, 52, 51, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 10, 1, 0, 0, 0, 54, 55, 7, 3, 0, 0, 55, 56, 7, 4, 0, 0, 56, 57, 7, 5, 0, 0, 57, 58, 7, 6, 0, 0, 58, 12, 1, 0, 0, 0, 59, 60, 7, 3, 0, 0, 60, 61, 7, 5, 0, 0, 61, 62, 7, 5, 0, 0, 62, 63, 7, 6, 0, 0, 63, 14, 1, 0, 0, 0, 64, 65, 7, 7, 0, 0, 65, 66, 7, 8, 0, 0, 66, 67, 7, 9, 0, 0, 67, 68, 7, 5, 0, 0, 68, 69, 7, 6, 0, 0, 69, 16, 1, 0, 0, 0, 70, 71, 7, 10, 0, 0, 71, 72, 7, 11, 0, 0, 72, 73, 7, 5, 0, 0, 73, 74, 7, 3, 0, 0, 74, 75, 7, 8, 0, 0, 75, 18, 1, 0, 0, 0, 8, 0, 22, 27, 34, 38, 45, 49, 52, 0]
[4, 0, 5, 46, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 1, 0, 4, 0, 13, 8, 0, 11, 0, 12, 0, 14, 1, 1, 4, 1, 18, 8, 1, 11, 1, 12, 1, 19, 1, 2, 1, 2, 1, 3, 4, 3, 25, 8, 3, 11, 3, 12, 3, 26, 1, 4, 1, 4, 3, 4, 31, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 36, 8, 4, 10, 4, 12, 4, 39, 9, 4, 1, 4, 3, 4, 42, 8, 4, 1, 4, 3, 4, 45, 8, 4, 0, 0, 5, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 1, 0, 3, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9, 9, 32, 32, 35, 35, 52, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 1, 12, 1, 0, 0, 0, 3, 17, 1, 0, 0, 0, 5, 21, 1, 0, 0, 0, 7, 24, 1, 0, 0, 0, 9, 28, 1, 0, 0, 0, 11, 13, 7, 0, 0, 0, 12, 11, 1, 0, 0, 0, 13, 14, 1, 0, 0, 0, 14, 12, 1, 0, 0, 0, 14, 15, 1, 0, 0, 0, 15, 2, 1, 0, 0, 0, 16, 18, 7, 1, 0, 0, 17, 16, 1, 0, 0, 0, 18, 19, 1, 0, 0, 0, 19, 17, 1, 0, 0, 0, 19, 20, 1, 0, 0, 0, 20, 4, 1, 0, 0, 0, 21, 22, 5, 35, 0, 0, 22, 6, 1, 0, 0, 0, 23, 25, 8, 2, 0, 0, 24, 23, 1, 0, 0, 0, 25, 26, 1, 0, 0, 0, 26, 24, 1, 0, 0, 0, 26, 27, 1, 0, 0, 0, 27, 8, 1, 0, 0, 0, 28, 30, 5, 34, 0, 0, 29, 31, 3, 3, 1, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 37, 1, 0, 0, 0, 32, 33, 3, 7, 3, 0, 33, 34, 3, 3, 1, 0, 34, 36, 1, 0, 0, 0, 35, 32, 1, 0, 0, 0, 36, 39, 1, 0, 0, 0, 37, 35, 1, 0, 0, 0, 37, 38, 1, 0, 0, 0, 38, 41, 1, 0, 0, 0, 39, 37, 1, 0, 0, 0, 40, 42, 3, 7, 3, 0, 41, 40, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 44, 1, 0, 0, 0, 43, 45, 5, 34, 0, 0, 44, 43, 1, 0, 0, 0, 44, 45, 1, 0, 0, 0, 45, 10, 1, 0, 0, 0, 8, 0, 14, 19, 26, 30, 37, 41, 44, 0]

View File

@ -3,8 +3,4 @@ WHITESPACE=2
HASH=3
STRING=4
QUOTED_STRING=5
ADFS=6
AFFS=7
BTRFS=8
EXFAT=9
'#'=3

View File

@ -1,4 +1,4 @@
// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Fstab

View File

@ -1,4 +1,4 @@
// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser
@ -46,51 +46,34 @@ func fstablexerLexerInit() {
"", "", "", "'#'",
}
staticData.SymbolicNames = []string{
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING", "ADFS",
"AFFS", "BTRFS", "EXFAT",
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING",
}
staticData.RuleNames = []string{
"DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING", "ADFS", "AFFS",
"BTRFS", "EXFAT",
"DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING",
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 9, 76, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 1, 0, 4, 0, 21,
8, 0, 11, 0, 12, 0, 22, 1, 1, 4, 1, 26, 8, 1, 11, 1, 12, 1, 27, 1, 2, 1,
2, 1, 3, 4, 3, 33, 8, 3, 11, 3, 12, 3, 34, 1, 4, 1, 4, 3, 4, 39, 8, 4,
1, 4, 1, 4, 1, 4, 5, 4, 44, 8, 4, 10, 4, 12, 4, 47, 9, 4, 1, 4, 3, 4, 50,
8, 4, 1, 4, 3, 4, 53, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1,
6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1,
8, 1, 8, 1, 8, 0, 0, 9, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15,
8, 17, 9, 1, 0, 12, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9, 9, 32, 32,
35, 35, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 70, 70, 102,
102, 2, 0, 83, 83, 115, 115, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116,
2, 0, 82, 82, 114, 114, 2, 0, 69, 69, 101, 101, 2, 0, 88, 88, 120, 120,
82, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0,
0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0,
0, 0, 0, 17, 1, 0, 0, 0, 1, 20, 1, 0, 0, 0, 3, 25, 1, 0, 0, 0, 5, 29, 1,
0, 0, 0, 7, 32, 1, 0, 0, 0, 9, 36, 1, 0, 0, 0, 11, 54, 1, 0, 0, 0, 13,
59, 1, 0, 0, 0, 15, 64, 1, 0, 0, 0, 17, 70, 1, 0, 0, 0, 19, 21, 7, 0, 0,
0, 20, 19, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 20, 1, 0, 0, 0, 22, 23,
1, 0, 0, 0, 23, 2, 1, 0, 0, 0, 24, 26, 7, 1, 0, 0, 25, 24, 1, 0, 0, 0,
26, 27, 1, 0, 0, 0, 27, 25, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 4, 1, 0,
0, 0, 29, 30, 5, 35, 0, 0, 30, 6, 1, 0, 0, 0, 31, 33, 8, 2, 0, 0, 32, 31,
1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 32, 1, 0, 0, 0, 34, 35, 1, 0, 0, 0,
35, 8, 1, 0, 0, 0, 36, 38, 5, 34, 0, 0, 37, 39, 3, 3, 1, 0, 38, 37, 1,
0, 0, 0, 38, 39, 1, 0, 0, 0, 39, 45, 1, 0, 0, 0, 40, 41, 3, 7, 3, 0, 41,
42, 3, 3, 1, 0, 42, 44, 1, 0, 0, 0, 43, 40, 1, 0, 0, 0, 44, 47, 1, 0, 0,
0, 45, 43, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45,
1, 0, 0, 0, 48, 50, 3, 7, 3, 0, 49, 48, 1, 0, 0, 0, 49, 50, 1, 0, 0, 0,
50, 52, 1, 0, 0, 0, 51, 53, 5, 34, 0, 0, 52, 51, 1, 0, 0, 0, 52, 53, 1,
0, 0, 0, 53, 10, 1, 0, 0, 0, 54, 55, 7, 3, 0, 0, 55, 56, 7, 4, 0, 0, 56,
57, 7, 5, 0, 0, 57, 58, 7, 6, 0, 0, 58, 12, 1, 0, 0, 0, 59, 60, 7, 3, 0,
0, 60, 61, 7, 5, 0, 0, 61, 62, 7, 5, 0, 0, 62, 63, 7, 6, 0, 0, 63, 14,
1, 0, 0, 0, 64, 65, 7, 7, 0, 0, 65, 66, 7, 8, 0, 0, 66, 67, 7, 9, 0, 0,
67, 68, 7, 5, 0, 0, 68, 69, 7, 6, 0, 0, 69, 16, 1, 0, 0, 0, 70, 71, 7,
10, 0, 0, 71, 72, 7, 11, 0, 0, 72, 73, 7, 5, 0, 0, 73, 74, 7, 3, 0, 0,
74, 75, 7, 8, 0, 0, 75, 18, 1, 0, 0, 0, 8, 0, 22, 27, 34, 38, 45, 49, 52,
0,
4, 0, 5, 46, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 1, 0, 4, 0, 13, 8, 0, 11, 0, 12, 0, 14, 1, 1, 4, 1, 18, 8, 1,
11, 1, 12, 1, 19, 1, 2, 1, 2, 1, 3, 4, 3, 25, 8, 3, 11, 3, 12, 3, 26, 1,
4, 1, 4, 3, 4, 31, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 36, 8, 4, 10, 4, 12, 4,
39, 9, 4, 1, 4, 3, 4, 42, 8, 4, 1, 4, 3, 4, 45, 8, 4, 0, 0, 5, 1, 1, 3,
2, 5, 3, 7, 4, 9, 5, 1, 0, 3, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9,
9, 32, 32, 35, 35, 52, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0,
0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 1, 12, 1, 0, 0, 0, 3, 17, 1,
0, 0, 0, 5, 21, 1, 0, 0, 0, 7, 24, 1, 0, 0, 0, 9, 28, 1, 0, 0, 0, 11, 13,
7, 0, 0, 0, 12, 11, 1, 0, 0, 0, 13, 14, 1, 0, 0, 0, 14, 12, 1, 0, 0, 0,
14, 15, 1, 0, 0, 0, 15, 2, 1, 0, 0, 0, 16, 18, 7, 1, 0, 0, 17, 16, 1, 0,
0, 0, 18, 19, 1, 0, 0, 0, 19, 17, 1, 0, 0, 0, 19, 20, 1, 0, 0, 0, 20, 4,
1, 0, 0, 0, 21, 22, 5, 35, 0, 0, 22, 6, 1, 0, 0, 0, 23, 25, 8, 2, 0, 0,
24, 23, 1, 0, 0, 0, 25, 26, 1, 0, 0, 0, 26, 24, 1, 0, 0, 0, 26, 27, 1,
0, 0, 0, 27, 8, 1, 0, 0, 0, 28, 30, 5, 34, 0, 0, 29, 31, 3, 3, 1, 0, 30,
29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 37, 1, 0, 0, 0, 32, 33, 3, 7, 3,
0, 33, 34, 3, 3, 1, 0, 34, 36, 1, 0, 0, 0, 35, 32, 1, 0, 0, 0, 36, 39,
1, 0, 0, 0, 37, 35, 1, 0, 0, 0, 37, 38, 1, 0, 0, 0, 38, 41, 1, 0, 0, 0,
39, 37, 1, 0, 0, 0, 40, 42, 3, 7, 3, 0, 41, 40, 1, 0, 0, 0, 41, 42, 1,
0, 0, 0, 42, 44, 1, 0, 0, 0, 43, 45, 5, 34, 0, 0, 44, 43, 1, 0, 0, 0, 44,
45, 1, 0, 0, 0, 45, 10, 1, 0, 0, 0, 8, 0, 14, 19, 26, 30, 37, 41, 44, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
@ -136,8 +119,4 @@ const (
FstabLexerHASH = 3
FstabLexerSTRING = 4
FstabLexerQUOTED_STRING = 5
FstabLexerADFS = 6
FstabLexerAFFS = 7
FstabLexerBTRFS = 8
FstabLexerEXFAT = 9
)

View File

@ -1,4 +1,4 @@
// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Fstab

View File

@ -1,4 +1,4 @@
// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Fstab
@ -36,8 +36,7 @@ func fstabParserInit() {
"", "", "", "'#'",
}
staticData.SymbolicNames = []string{
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING", "ADFS",
"AFFS", "BTRFS", "EXFAT",
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING",
}
staticData.RuleNames = []string{
"entry", "spec", "mountPoint", "fileSystem", "mountOptions", "freq",
@ -45,35 +44,35 @@ func fstabParserInit() {
}
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 1, 9, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4,
4, 1, 5, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4,
2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1,
0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3,
0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40,
8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0,
1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1,
4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12,
0, 2, 1, 0, 4, 5, 1, 0, 4, 9, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0,
4, 57, 1, 0, 0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0,
0, 0, 12, 65, 1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16,
1, 0, 0, 0, 16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0,
18, 19, 1, 0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1,
0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24,
23, 1, 0, 0, 0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0,
0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31,
3, 6, 3, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0,
32, 34, 5, 2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1,
0, 0, 0, 35, 37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37,
39, 1, 0, 0, 0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0,
0, 40, 42, 1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43,
1, 0, 0, 0, 43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0,
45, 46, 1, 0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1,
0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51,
50, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0,
1, 54, 1, 1, 0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7,
0, 0, 0, 58, 5, 1, 0, 0, 0, 59, 60, 7, 1, 0, 0, 60, 7, 1, 0, 0, 0, 61,
62, 7, 0, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0,
0, 65, 66, 5, 1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30,
33, 36, 39, 42, 45, 48, 51,
0, 1, 1, 0, 4, 5, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0, 4, 57, 1, 0,
0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0, 0, 0, 12, 65,
1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16, 1, 0, 0, 0,
16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0, 18, 19, 1,
0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1, 0, 0, 0, 21,
22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24, 23, 1, 0, 0,
0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0, 0, 27, 26,
1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31, 3, 6, 3, 0,
30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0, 32, 34, 5,
2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1, 0, 0, 0, 35,
37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 39, 1, 0, 0,
0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0, 0, 40, 42,
1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0,
43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1,
0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1, 0, 0, 0, 48,
49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51, 50, 1, 0, 0,
0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0, 1, 54, 1, 1,
0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7, 0, 0, 0, 58,
5, 1, 0, 0, 0, 59, 60, 7, 0, 0, 0, 60, 7, 1, 0, 0, 0, 61, 62, 7, 0, 0,
0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0, 0, 65, 66, 5,
1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42,
45, 48, 51,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
@ -117,10 +116,6 @@ const (
FstabParserHASH = 3
FstabParserSTRING = 4
FstabParserQUOTED_STRING = 5
FstabParserADFS = 6
FstabParserAFFS = 7
FstabParserBTRFS = 8
FstabParserEXFAT = 9
)
// FstabParser rules.
@ -754,10 +749,6 @@ type IFileSystemContext interface {
GetParser() antlr.Parser
// Getter signatures
ADFS() antlr.TerminalNode
AFFS() antlr.TerminalNode
BTRFS() antlr.TerminalNode
EXFAT() antlr.TerminalNode
STRING() antlr.TerminalNode
QUOTED_STRING() antlr.TerminalNode
@ -797,22 +788,6 @@ func NewFileSystemContext(parser antlr.Parser, parent antlr.ParserRuleContext, i
func (s *FileSystemContext) GetParser() antlr.Parser { return s.parser }
func (s *FileSystemContext) ADFS() antlr.TerminalNode {
return s.GetToken(FstabParserADFS, 0)
}
func (s *FileSystemContext) AFFS() antlr.TerminalNode {
return s.GetToken(FstabParserAFFS, 0)
}
func (s *FileSystemContext) BTRFS() antlr.TerminalNode {
return s.GetToken(FstabParserBTRFS, 0)
}
func (s *FileSystemContext) EXFAT() antlr.TerminalNode {
return s.GetToken(FstabParserEXFAT, 0)
}
func (s *FileSystemContext) STRING() antlr.TerminalNode {
return s.GetToken(FstabParserSTRING, 0)
}
@ -851,7 +826,7 @@ func (p *FstabParser) FileSystem() (localctx IFileSystemContext) {
p.SetState(59)
_la = p.GetTokenStream().LA(1)
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&1008) != 0) {
if !(_la == FstabParserSTRING || _la == FstabParserQUOTED_STRING) {
p.GetErrorHandler().RecoverInline(p)
} else {
p.GetErrorHandler().ReportMatch(p)

View File

@ -6,6 +6,31 @@ import (
"strings"
)
func createMountOptionField(
options []docvalues.EnumString,
assignOption map[docvalues.EnumString]docvalues.DeprecatedValue,
) docvalues.DeprecatedValue {
// dynamicOptions := docvalues.MergeKeyEnumAssignmentMaps(defaultAssignOptions, assignOption)
return docvalues.ArrayValue{
Separator: ",",
DuplicatesExtractor: &MountOptionsExtractor,
SubValue: docvalues.OrValue{
Values: []docvalues.DeprecatedValue{
docvalues.KeyEnumAssignmentValue{
Values: assignOption,
ValueIsOptional: false,
Separator: "=",
},
docvalues.EnumValue{
EnforceValues: true,
Values: options,
},
},
},
}
}
var MountOptionsExtractor = func(value string) string {
separatorIndex := strings.Index(value, "=")
@ -339,31 +364,6 @@ Added in version 233.`,
): docvalues.StringValue{},
}
func createMountOptionField(
options []docvalues.EnumString,
assignOption map[docvalues.EnumString]docvalues.DeprecatedValue,
) docvalues.DeprecatedValue {
// dynamicOptions := docvalues.MergeKeyEnumAssignmentMaps(defaultAssignOptions, assignOption)
return docvalues.ArrayValue{
Separator: ",",
DuplicatesExtractor: &MountOptionsExtractor,
SubValue: docvalues.OrValue{
Values: []docvalues.DeprecatedValue{
docvalues.KeyEnumAssignmentValue{
Values: assignOption,
ValueIsOptional: false,
Separator: "=",
},
docvalues.EnumValue{
EnforceValues: true,
Values: options,
},
},
},
}
}
type optionField struct {
Assignable map[docvalues.EnumString]docvalues.DeprecatedValue
Enums []docvalues.EnumString
@ -376,6 +376,10 @@ var MountOptionsMapField = map[string]optionField{
Enums: commondocumentation.AdfsDocumentationEnums,
Assignable: commondocumentation.AdfsDocumentationAssignable,
},
"apfs": {
Enums: commondocumentation.APFSDocumentationEnums,
Assignable: commondocumentation.APFSDocumentationAssignable,
},
"affs": {
Enums: commondocumentation.AffsDocumentationEnums,
Assignable: commondocumentation.AffsDocumentationAssignable,
@ -478,4 +482,8 @@ var MountOptionsMapField = map[string]optionField{
Enums: commondocumentation.VfatDocumentationEnums,
Assignable: commondocumentation.VfatDocumentationAssignable,
},
"bcachefs": {
Enums: commondocumentation.BcacheFSDocumentationEnums,
Assignable: commondocumentation.BcacheFSDocumentationAssignable,
},
}

View File

@ -6,7 +6,8 @@ import (
)
var UuidField = docvalues.RegexValue{
Regex: *regexp.MustCompile(`[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}`),
// Can either be a full UUID or a short FAT-style volume ID (e.g. ABCD-1234)
Regex: *regexp.MustCompile(`(?i)([a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}|[a-f0-9]{4}-[a-f0-9]{4})`),
}
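For context, an editorial sketch (not part of the diff): the widened pattern above accepts both a full UUID and the short FAT-style volume ID; the sample values below are illustrative only.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as UuidField above.
	pattern := regexp.MustCompile(`(?i)([a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}|[a-f0-9]{4}-[a-f0-9]{4})`)
	fmt.Println(pattern.MatchString("550e8400-e29b-41d4-a716-446655440000")) // true: full UUID
	fmt.Println(pattern.MatchString("ABCD-1234"))                            // true: short FAT-style volume ID
	fmt.Println(pattern.MatchString("not-an-id"))                            // false
}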
var LabelField = docvalues.RegexValue{
Regex: *regexp.MustCompile(`\S+`),
@ -15,7 +16,8 @@ var LabelField = docvalues.RegexValue{
var SpecField = docvalues.OrValue{
Values: []docvalues.DeprecatedValue{
docvalues.PathValue{
RequiredType: docvalues.PathTypeExistenceOptional,
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
docvalues.KeyEnumAssignmentValue{
Separator: "=",

View File

@ -44,39 +44,29 @@ func GetCompletion(
fileSystemType := entry.Fields.FilesystemType.Value.Value
completions := make([]protocol.CompletionItem, 0, 50)
for _, completion := range fields.DefaultMountOptionsField.DeprecatedFetchCompletions(line, cursor) {
var documentation string
optionsValue := entry.FetchMountOptionsField(false)
switch completion.Documentation.(type) {
case string:
documentation = completion.Documentation.(string)
case *string:
documentation = *completion.Documentation.(*string)
}
if optionsValue != nil {
for _, completion := range optionsValue.DeprecatedFetchCompletions(line, cursor) {
var documentation string
completion.Documentation = protocol.MarkupContent{
Kind: protocol.MarkupKindMarkdown,
Value: documentation + "\n\n" + "From: _Default Mount Options_",
switch completion.Documentation.(type) {
case string:
documentation = completion.Documentation.(string)
case *string:
documentation = *completion.Documentation.(*string)
}
completion.Documentation = protocol.MarkupContent{
Kind: protocol.MarkupKindMarkdown,
Value: documentation + "\n\n" + fmt.Sprintf("From: _%s_", fileSystemType),
}
completions = append(completions, completion)
}
completions = append(completions, completion)
}
for _, completion := range entry.FetchMountOptionsField(false).DeprecatedFetchCompletions(line, cursor) {
var documentation string
switch completion.Documentation.(type) {
case string:
documentation = completion.Documentation.(string)
case *string:
documentation = *completion.Documentation.(*string)
}
completion.Documentation = protocol.MarkupContent{
Kind: protocol.MarkupKindMarkdown,
Value: documentation + "\n\n" + fmt.Sprintf("From: _%s_", fileSystemType),
}
completions = append(completions, completion)
}
// Add defaults
completions = append(completions, fields.DefaultMountOptionsField.DeprecatedFetchCompletions(line, cursor)...)
return completions, nil
case ast.FstabFieldFreq:

View File

@ -1,4 +1,4 @@
// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Hosts

View File

@ -1,4 +1,4 @@
// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser

View File

@ -1,4 +1,4 @@
// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Hosts

View File

@ -1,4 +1,4 @@
// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Hosts

View File

@ -38,27 +38,43 @@ func checkOption(
option *ast.SSHOption,
block ast.SSHBlock,
) {
if option.Key == nil {
return
}
///// General checks
checkIsUsingDoubleQuotes(ctx, option.Key.Value, option.Key.LocationRange)
checkQuotesAreClosed(ctx, option.Key.Value, option.Key.LocationRange)
docOption, found := fields.Options[option.Key.Key]
if option.Separator == nil || option.Separator.Value.Value == "" {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.LocationRange.ToLSPRange(),
Message: "There should be a separator between an option and its value",
Severity: &common.SeverityError,
})
} else {
checkIsUsingDoubleQuotes(ctx, option.Separator.Value, option.Separator.LocationRange)
checkQuotesAreClosed(ctx, option.Separator.Value, option.Separator.LocationRange)
}
if !found {
///// Check if the key is valid
docOption, optionFound := fields.Options[option.Key.Key]
if !optionFound {
// Diagnostics will be handled by `values.go`
return
}
// Check for values that are not allowed in Host blocks
if block != nil && block.GetBlockType() == ast.SSHBlockTypeHost {
if utils.KeyExists(fields.HostDisallowedOptions, option.Key.Key) {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.LocationRange.ToLSPRange(),
Message: fmt.Sprintf("Option '%s' is not allowed in Host blocks", option.Key.Key),
Severity: &common.SeverityError,
})
}
if block != nil && block.GetBlockType() == ast.SSHBlockTypeHost && utils.KeyExists(fields.HostDisallowedOptions, option.Key.Key) {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.LocationRange.ToLSPRange(),
Message: fmt.Sprintf("Option '%s' is not allowed in Host blocks", option.Key.Key),
Severity: &common.SeverityError,
})
}
///// Check if the value is valid
if option.OptionValue != nil {
checkIsUsingDoubleQuotes(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
checkQuotesAreClosed(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
@ -75,17 +91,6 @@ func checkOption(
})
}
}
if option.Separator == nil || option.Separator.Value.Value == "" {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.LocationRange.ToLSPRange(),
Message: fmt.Sprintf("There should be a separator between an option and its value"),
Severity: &common.SeverityError,
})
} else {
checkIsUsingDoubleQuotes(ctx, option.Separator.Value, option.Separator.LocationRange)
checkQuotesAreClosed(ctx, option.Separator.Value, option.Separator.LocationRange)
}
}
func checkBlock(

View File

@ -9,18 +9,6 @@ import (
protocol "github.com/tliron/glsp/protocol_3_16"
)
func analyzeQuotesAreValid(
ctx *analyzerContext,
) {
for _, info := range ctx.document.Config.GetAllOptions() {
checkIsUsingDoubleQuotes(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
checkIsUsingDoubleQuotes(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
checkQuotesAreClosed(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
checkQuotesAreClosed(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
}
}
func checkIsUsingDoubleQuotes(
ctx *analyzerContext,
value commonparser.ParsedString,

View File

@ -7,6 +7,18 @@ import (
protocol "github.com/tliron/glsp/protocol_3_16"
)
func testQuotes(
ctx *analyzerContext,
) {
for _, info := range ctx.document.Config.GetAllOptions() {
checkIsUsingDoubleQuotes(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
checkIsUsingDoubleQuotes(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
checkQuotesAreClosed(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
checkQuotesAreClosed(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
}
}
func TestSimpleInvalidQuotesExample(
t *testing.T,
) {
@ -17,7 +29,7 @@ PermitRootLogin 'yes'
document: d,
diagnostics: make([]protocol.Diagnostic, 0),
}
analyzeQuotesAreValid(ctx)
testQuotes(ctx)
if !(len(ctx.diagnostics) == 1) {
t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))
@ -34,7 +46,7 @@ func TestSingleQuotesKeyAndOptionExample(
document: d,
diagnostics: make([]protocol.Diagnostic, 0),
}
analyzeQuotesAreValid(ctx)
testQuotes(ctx)
if !(len(ctx.diagnostics) == 2) {
t.Errorf("Expected 2 ctx.diagnostics, got %v", len(ctx.diagnostics))
@ -51,7 +63,7 @@ PermitRootLogin "yes
document: d,
diagnostics: make([]protocol.Diagnostic, 0),
}
analyzeQuotesAreValid(ctx)
testQuotes(ctx)
if !(len(ctx.diagnostics) == 1) {
t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))
@ -68,7 +80,7 @@ func TestIncompleteQuotesExample(
document: d,
diagnostics: make([]protocol.Diagnostic, 0),
}
analyzeQuotesAreValid(ctx)
testQuotes(ctx)
if !(len(ctx.diagnostics) == 1) {
t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))

View File

@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Config

View File

@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser

View File

@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Config

View File

@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Config

View File

@ -139,6 +139,7 @@ rsa-sha2-512,rsa-sha2-256
Arguments to CertificateFile may use the tilde syntax to refer to a user's home directory, the tokens described in the TOKENS section and environment variables as described in the ENVIRONMENT VARIABLES section.
It is possible to have multiple certificate files specified in configuration files; these certificates will be tried in sequence. Multiple CertificateFile directives will add to the list of certificates used for authentication.`,
Value: docvalues.PathValue{
IsOptional: true,
RequiredType: docvalues.PathTypeFile,
},
},
@ -366,6 +367,7 @@ aes128-gcm@openssh.com,aes256-gcm@openssh.com
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
RespectQuotes: true,
SubValue: docvalues.PathValue{
IsOptional: true,
RequiredType: docvalues.PathTypeFile,
},
},
@ -834,6 +836,7 @@ rsa-sha2-512,rsa-sha2-256
Documentation: `Specifies a path to a library that will be used when loading any FIDO authenticator-hosted keys, overriding the default of using the built-in USB HID support.
If the specified value begins with a $ character, then it will be treated as an environment variable containing the path to the library.`,
Value: docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
@ -963,6 +966,7 @@ rsa-sha2-512,rsa-sha2-256
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
RespectQuotes: true,
SubValue: docvalues.PathValue{
IsOptional: true,
RequiredType: docvalues.PathTypeFile,
},
},
@ -986,6 +990,7 @@ rsa-sha2-512,rsa-sha2-256
"xauthlocation": {
Documentation: `Specifies the full pathname of the xauth(1) program. The default is /usr/X11R6/bin/xauth.`,
Value: docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},

View File

@ -2,8 +2,6 @@ package handlers
import (
sshconfig "config-lsp/handlers/ssh_config"
"config-lsp/handlers/ssh_config/diagnostics"
"fmt"
protocol "github.com/tliron/glsp/protocol_3_16"
)
@ -12,49 +10,12 @@ func FetchCodeActions(
d *sshconfig.SSHDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
line := params.Range.Start.Line
if d.Indexes == nil {
return nil
}
if unknownOption, found := d.Indexes.UnknownOptions[line]; found {
var blockLine *uint32
actions := getAddToUnknownCodeAction(d, params)
actions = append(actions, getKeywordTypoFixes(d, params)...)
if unknownOption.Block != nil {
blockLineValue := uint32(unknownOption.Block.GetLocation().Start.Line)
blockLine = &blockLineValue
}
commandID := "sshconfig." + CodeActionAddToUnknown
command := protocol.Command{
Title: fmt.Sprintf("Add %s to unknown options", unknownOption.Option.Key.Key),
Command: string(commandID),
Arguments: []any{
codeActionAddToUnknownArgs{
URI: params.TextDocument.URI,
OptionLine: unknownOption.Option.Start.Line,
BlockLine: blockLine,
},
},
}
kind := protocol.CodeActionKindQuickFix
codeAction := &protocol.CodeAction{
Title: fmt.Sprintf("Add %s to unknown options", unknownOption.Option.Key.Key),
Command: &command,
Kind: &kind,
Diagnostics: []protocol.Diagnostic{
diagnostics.GenerateUnknownOption(
unknownOption.Option.Key.ToLSPRange(),
unknownOption.Option.Key.Value.Value,
),
},
}
return []protocol.CodeAction{
*codeAction,
}
}
return nil
return actions
}

View File

@ -0,0 +1,56 @@
package handlers
import (
sshconfig "config-lsp/handlers/ssh_config"
"config-lsp/handlers/ssh_config/diagnostics"
"fmt"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func getAddToUnknownCodeAction(
d *sshconfig.SSHDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
line := params.Range.Start.Line
if unknownOption, found := d.Indexes.UnknownOptions[line]; found {
var blockLine *uint32
if unknownOption.Block != nil {
blockLineValue := uint32(unknownOption.Block.GetLocation().Start.Line)
blockLine = &blockLineValue
}
commandID := "sshconfig." + CodeActionAddToUnknown
command := protocol.Command{
Title: fmt.Sprintf("Add %s to unknown options", unknownOption.Option.Key.Key),
Command: string(commandID),
Arguments: []any{
codeActionAddToUnknownArgs{
URI: params.TextDocument.URI,
OptionLine: unknownOption.Option.Start.Line,
BlockLine: blockLine,
},
},
}
kind := protocol.CodeActionKindQuickFix
codeAction := protocol.CodeAction{
Title: fmt.Sprintf("Add %s to unknown options", unknownOption.Option.Key.Key),
Command: &command,
Kind: &kind,
Diagnostics: []protocol.Diagnostic{
diagnostics.GenerateUnknownOption(
unknownOption.Option.Key.ToLSPRange(),
unknownOption.Option.Key.Value.Value,
),
},
}
return []protocol.CodeAction{
codeAction,
}
}
return nil
}

View File

@ -0,0 +1,64 @@
package handlers
import (
"config-lsp/common"
sshconfig "config-lsp/handlers/ssh_config"
"config-lsp/handlers/ssh_config/diagnostics"
"config-lsp/handlers/ssh_config/fields"
"config-lsp/utils"
"fmt"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func getKeywordTypoFixes(
d *sshconfig.SSHDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
if common.ServerOptions.NoTypoSuggestions {
return nil
}
line := params.Range.Start.Line
if typoOption, found := d.Indexes.UnknownOptions[line]; found {
name := typoOption.Option.Key.Value.Value
opts := utils.KeysOfMap(fields.Options)
suggestedOptions := common.FindSimilarItems(fields.CreateNormalizedName(name), opts)
actions := make([]protocol.CodeAction, 0, len(suggestedOptions))
kind := protocol.CodeActionKindQuickFix
for index, normalizedOptionName := range suggestedOptions {
isPreferred := index == 0
optionName := fields.FieldsNameFormattedMap[normalizedOptionName]
actions = append(actions, protocol.CodeAction{
Title: fmt.Sprintf("Typo Fix: %s", optionName),
IsPreferred: &isPreferred,
Kind: &kind,
Diagnostics: []protocol.Diagnostic{
diagnostics.GenerateUnknownOption(
typoOption.Option.Key.ToLSPRange(),
typoOption.Option.Key.Value.Value,
),
},
Edit: &protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
params.TextDocument.URI: {
{
Range: typoOption.Option.Key.ToLSPRange(),
NewText: optionName,
},
},
},
},
})
}
return actions
}
return nil
}

View File

@ -33,7 +33,7 @@ func FormatDocument(
// it := d.Config.Options.Iterator()
// for it.Next() {
// line := it.Key().(uint32)
// entry := it.Value().(ast.SSHEntry)
// entry := it.Name().(ast.SSHEntry)
//
// if !(line >= textRange.Start.Line && line <= textRange.End.Line) {
// continue

View File

@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Match

View File

@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser

View File

@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Match

View File

@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Match

View File

@ -4,7 +4,9 @@ import (
"config-lsp/common"
docvalues "config-lsp/doc-values"
"config-lsp/handlers/sshd_config/ast"
"config-lsp/handlers/sshd_config/diagnostics"
"config-lsp/handlers/sshd_config/fields"
"config-lsp/utils"
"fmt"
protocol "github.com/tliron/glsp/protocol_3_16"
@ -20,7 +22,7 @@ func analyzeStructureIsValid(
switch entry.(type) {
case *ast.SSHDOption:
checkOption(ctx, entry.(*ast.SSHDOption), false)
checkOption(ctx, entry.(*ast.SSHDOption), nil)
case *ast.SSHDMatchBlock:
matchBlock := entry.(*ast.SSHDMatchBlock)
checkMatchBlock(ctx, matchBlock)
@ -31,36 +33,52 @@ func analyzeStructureIsValid(
func checkOption(
ctx *analyzerContext,
option *ast.SSHDOption,
isInMatchBlock bool,
matchBlock *ast.SSHDMatchBlock,
) {
if option.Key == nil {
return
}
///// General checks
checkIsUsingDoubleQuotes(ctx, option.Key.Value, option.Key.LocationRange)
checkQuotesAreClosed(ctx, option.Key.Value, option.Key.LocationRange)
key := option.Key.Key
docOption, found := fields.Options[key]
if !found {
if option.Separator == nil || option.Separator.Value.Value == "" {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.ToLSPRange(),
Message: fmt.Sprintf("Unknown option: %s", option.Key.Key),
Range: option.Key.LocationRange.ToLSPRange(),
Message: "There should be a separator between an option and its value",
Severity: &common.SeverityError,
})
} else {
checkIsUsingDoubleQuotes(ctx, option.Separator.Value, option.Separator.LocationRange)
checkQuotesAreClosed(ctx, option.Separator.Value, option.Separator.LocationRange)
}
///// Check if the key is valid
docOption, optionFound := fields.Options[option.Key.Key]
if !optionFound {
ctx.diagnostics = append(ctx.diagnostics, diagnostics.GenerateUnknownOption(
option.Key.ToLSPRange(),
option.Key.Value.Value,
))
ctx.document.Indexes.UnknownOptions[option.Start.Line] = ast.SSHDOptionInfo{
Option: option,
MatchBlock: matchBlock,
}
// Since we don't know the option, we can't verify the value
return
}
if _, found := fields.MatchAllowedOptions[key]; !found && isInMatchBlock {
// Check for values that are not allowed in Match blocks
} else if matchBlock != nil && !utils.KeyExists(fields.MatchAllowedOptions, option.Key.Key) {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.ToLSPRange(),
Message: fmt.Sprintf("Option '%s' is not allowed inside Match blocks", option.Key.Key),
Message: fmt.Sprintf("Option '%s' is not allowed in Match blocks", option.Key.Key),
Severity: &common.SeverityError,
})
}
///// Check if the value is valid
if option.OptionValue != nil {
checkIsUsingDoubleQuotes(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
checkQuotesAreClosed(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
@ -78,16 +96,6 @@ func checkOption(
}
}
if option.Separator == nil || option.Separator.Value.Value == "" {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.Key.LocationRange.ToLSPRange(),
Message: fmt.Sprintf("There should be a separator between an option and its value"),
Severity: &common.SeverityError,
})
} else {
checkIsUsingDoubleQuotes(ctx, option.Separator.Value, option.Separator.LocationRange)
checkQuotesAreClosed(ctx, option.Separator.Value, option.Separator.LocationRange)
}
}
func checkMatchBlock(
@ -99,6 +107,6 @@ func checkMatchBlock(
for it.Next() {
option := it.Value().(*ast.SSHDOption)
checkOption(ctx, option, true)
checkOption(ctx, option, matchBlock)
}
}

View File

@ -0,0 +1,34 @@
package analyzer
import (
testutils_test "config-lsp/handlers/sshd_config/test_utils"
"testing"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func TestUnknownOptionExample(
t *testing.T,
) {
d := testutils_test.DocumentFromInput(t, `
ThisOptionDoesNotExist okay
`)
ctx := &analyzerContext{
document: d,
diagnostics: make([]protocol.Diagnostic, 0),
}
analyzeStructureIsValid(ctx)
if !(len(ctx.diagnostics) == 1) {
t.Errorf("Expected 1 error, got %v", len(ctx.diagnostics))
}
if !(len(ctx.document.Indexes.UnknownOptions) == 1) {
t.Errorf("Expected 1 unknown option, got %v", len(ctx.document.Indexes.UnknownOptions))
}
if !(ctx.document.Indexes.UnknownOptions[0].Option.Key.Value.Value == "ThisOptionDoesNotExist") {
t.Errorf("Expected 'ThisOptionDoesNotExist', got %v", ctx.document.Indexes.UnknownOptions[0].Option.Key.Value.Value)
}
}

View File

@ -11,12 +11,12 @@ import (
func analyzeQuotesAreValid(
ctx *analyzerContext,
) {
for _, option := range ctx.document.Config.GetAllOptions() {
checkIsUsingDoubleQuotes(ctx, option.Key.Value, option.Key.LocationRange)
checkIsUsingDoubleQuotes(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
for _, info := range ctx.document.Config.GetAllOptions() {
checkIsUsingDoubleQuotes(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
checkIsUsingDoubleQuotes(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
checkQuotesAreClosed(ctx, option.Key.Value, option.Key.LocationRange)
checkQuotesAreClosed(ctx, option.OptionValue.Value, option.OptionValue.LocationRange)
checkQuotesAreClosed(ctx, info.Option.Key.Value, info.Option.Key.LocationRange)
checkQuotesAreClosed(ctx, info.Option.OptionValue.Value, info.Option.OptionValue.LocationRange)
}
}

View File

@ -13,13 +13,13 @@ import (
func analyzeTokens(
ctx *analyzerContext,
) {
for _, option := range ctx.document.Config.GetAllOptions() {
if option.Key == nil || option.OptionValue == nil {
for _, info := range ctx.document.Config.GetAllOptions() {
if info.Option.Key == nil || info.Option.OptionValue == nil {
continue
}
key := option.Key.Key
text := option.OptionValue.Value.Value
key := info.Option.Key.Key
text := info.Option.OptionValue.Value.Value
var tokens []string
if foundTokens, found := fields.OptionsTokensMap[key]; found {
@ -39,7 +39,7 @@ func analyzeTokens(
}
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: option.OptionValue.ToLSPRange(),
Range: info.Option.OptionValue.ToLSPRange(),
Message: fmt.Sprintf("Token '%s' is not allowed for option '%s'", token, optionName),
Severity: &common.SeverityError,
})

View File

@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Config

View File

@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser

View File

@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Config

View File

@ -1,4 +1,4 @@
// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Config

View File

@ -0,0 +1,8 @@
// Contains structs that are used as utilities, but are
// not used for the AST itself
package ast
type SSHDOptionInfo struct {
MatchBlock *SSHDMatchBlock
Option *SSHDOption
}

View File

@ -64,26 +64,32 @@ func (c SSHDConfig) FindOption(line uint32) (*SSHDOption, *SSHDMatchBlock) {
return nil, nil
}
func (c SSHDConfig) GetAllOptions() []*SSHDOption {
options := make(
[]*SSHDOption,
func (c SSHDConfig) GetAllOptions() []SSHDOptionInfo {
infos := make(
[]SSHDOptionInfo,
0,
// Approximation, this does not need to be exact
c.Options.Size()+10,
)
var currentMatchBlock *SSHDMatchBlock = nil
for _, rawEntry := range c.Options.Values() {
switch entry := rawEntry.(type) {
case *SSHDOption:
options = append(options, entry)
infos = append(infos, SSHDOptionInfo{
Option: entry,
MatchBlock: currentMatchBlock,
})
case *SSHDMatchBlock:
options = append(options, entry.MatchOption)
currentMatchBlock = entry
for _, rawOption := range entry.Options.Values() {
options = append(options, rawOption.(*SSHDOption))
}
infos = append(infos, SSHDOptionInfo{
Option: entry.MatchOption,
MatchBlock: currentMatchBlock,
})
}
}
return options
return infos
}

View File

@ -0,0 +1,19 @@
package diagnostics
import (
"config-lsp/common"
"fmt"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func GenerateUnknownOption(
diagnosticRange protocol.Range,
optionName string,
) protocol.Diagnostic {
return protocol.Diagnostic{
Range: diagnosticRange,
Message: fmt.Sprintf("Unknown option: %s", optionName),
Severity: &common.SeverityError,
}
}

View File

@ -83,31 +83,36 @@ See PATTERNS in ssh_config(5) for more information on patterns. This keyword may
},
},
docvalues.ArrayValue{
Separator: ",",
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
Separator: " ",
DuplicatesExtractor: nil,
RespectQuotes: true,
SubValue: docvalues.EnumValue{
EnforceValues: true,
Values: []docvalues.EnumString{
docvalues.CreateEnumString("none"),
SubValue: docvalues.ArrayValue{
Separator: ",",
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
RespectQuotes: true,
SubValue: docvalues.EnumValue{
EnforceValues: true,
Values: []docvalues.EnumString{
docvalues.CreateEnumString("none"),
docvalues.CreateEnumString("password"),
docvalues.CreateEnumString("publickey"),
docvalues.CreateEnumString("gssapi-with-mic"),
docvalues.CreateEnumString("keyboard-interactive"),
docvalues.CreateEnumString("hostbased"),
docvalues.CreateEnumString("password"),
docvalues.CreateEnumString("publickey"),
docvalues.CreateEnumString("gssapi-with-mic"),
docvalues.CreateEnumString("keyboard-interactive"),
docvalues.CreateEnumString("hostbased"),
docvalues.CreateEnumString("password:bsdauth"),
docvalues.CreateEnumString("publickey:bsdauth"),
docvalues.CreateEnumString("gssapi-with-mic:bsdauth"),
docvalues.CreateEnumString("keyboard-interactive:bsdauth"),
docvalues.CreateEnumString("hostbased:bsdauth"),
docvalues.CreateEnumString("password:bsdauth"),
docvalues.CreateEnumString("publickey:bsdauth"),
docvalues.CreateEnumString("gssapi-with-mic:bsdauth"),
docvalues.CreateEnumString("keyboard-interactive:bsdauth"),
docvalues.CreateEnumString("hostbased:bsdauth"),
docvalues.CreateEnumString("password:pam"),
docvalues.CreateEnumString("publickey:pam"),
docvalues.CreateEnumString("gssapi-with-mic:pam"),
docvalues.CreateEnumString("keyboard-interactive:pam"),
docvalues.CreateEnumString("hostbased:pam"),
docvalues.CreateEnumString("password:pam"),
docvalues.CreateEnumString("publickey:pam"),
docvalues.CreateEnumString("gssapi-with-mic:pam"),
docvalues.CreateEnumString("keyboard-interactive:pam"),
docvalues.CreateEnumString("hostbased:pam"),
},
},
},
},
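Editorial note: the nesting above (a space-separated ArrayValue whose SubValue is itself a comma-separated ArrayValue) mirrors how sshd parses values such as "publickey,password publickey,keyboard-interactive", i.e. several comma-separated method lists separated by spaces. The concrete option name is not visible in this hunk (presumably AuthenticationMethods), so treat that attribution as an assumption.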
@ -146,14 +151,26 @@ See PATTERNS in ssh_config(5) for more information on patterns. This keyword may
Documentation: `Specifies a file that lists principal names that are accepted for certificate authentication. When using certificates signed by a key listed in TrustedUserCAKeys, this file lists names, one of which must appear in the certificate for it to be accepted for authentication. Names are listed one per line preceded by key options (as described in AUTHORIZED_KEYS FILE FORMAT in sshd(8)). Empty lines and comments starting with # are ignored.
Arguments to AuthorizedPrincipalsFile accept the tokens described in the TOKENS section. After expansion, AuthorizedPrincipalsFile is taken to be an absolute path or one relative to the user's home directory. The default is none, i.e. not to use a principals file; in this case, the username of the user must appear in a certificate's principals list for it to be accepted.
Note that AuthorizedPrincipalsFile is only used when authentication proceeds using a CA listed in TrustedUserCAKeys and is not consulted for certification authorities trusted via ~/.ssh/authorized_keys, though the principals= key option offers a similar facility (see sshd(8) for details).`,
Value: docvalues.PathValue{
RequiredType: docvalues.PathTypeFile,
Value: docvalues.OrValue{
Values: []docvalues.DeprecatedValue{
docvalues.SingleEnumValue("none"),
docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
},
},
"banner": {
Documentation: `The contents of the specified file are sent to the remote user before authentication is allowed. If the argument is none then no banner is displayed. By default, no banner is displayed.`,
Value: docvalues.PathValue{
RequiredType: docvalues.PathTypeFile,
Value: docvalues.OrValue{
Values: []docvalues.DeprecatedValue{
docvalues.SingleEnumValue("none"),
docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
},
},
"casignaturealgorithms": {
@ -343,13 +360,19 @@ See PATTERNS in ssh_config(5) for more information on patterns. This keyword may
},
"hostcertificate": {
Documentation: `Specifies a file containing a public host certificate. The certificate's public key must match a private host key already specified by HostKey. The default behaviour of sshd(8) is not to load any certificates.`,
Value: docvalues.PathValue{},
Value: docvalues.PathValue{
IsOptional: true,
RequiredType: docvalues.PathTypeFile,
},
},
"hostkey": {
Documentation: `Specifies a file containing a private host key used by SSH. The defaults are /etc/ssh/ssh_host_ecdsa_key, /etc/ssh/ssh_host_ed25519_key and /etc/ssh/ssh_host_rsa_key.
Note that sshd(8) will refuse to use a file if it is group/world-accessible and that the HostKeyAlgorithms option restricts which of the keys are actually used by sshd(8).
It is possible to have multiple host key files. It is also possible to specify public host key files instead. In this case operations on the private key will be delegated to an ssh-agent(1).`,
Value: docvalues.PathValue{},
Value: docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
"hostkeyagent": {
Documentation: `Identifies the UNIX-domain socket used to communicate with an agent that has access to the private host keys. If the string "SSH_AUTH_SOCK" is specified, the location of the socket will be read from the SSH_AUTH_SOCK environment variable.`,
@ -592,8 +615,9 @@ Only a subset of keywords may be used on the lines following a Match keyword. Av
},
},
"modulifile": {
Documentation: `Specifies the moduli(5) file that contains the Diffie- Hellman groups used for the “diffie-hellman-group-exchange-sha1” and “diffie-hellman-group-exchange-sha256” key exchange methods. The default is /etc/moduli.`,
Documentation: `Specifies the moduli(5) file that contains the Diffie-Hellman groups used for the “diffie-hellman-group-exchange-sha1” and “diffie-hellman-group-exchange-sha256” key exchange methods. The default is /etc/moduli.`,
Value: docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},
@ -859,6 +883,7 @@ Only a subset of keywords may be used on the lines following a Match keyword. Av
"securitykeyprovider": {
Documentation: `Specifies a path to a library that will be used when loading FIDO authenticator-hosted keys, overriding the default of using the built-in USB HID support.`,
Value: docvalues.PathValue{
IsOptional: false,
RequiredType: docvalues.PathTypeFile,
},
},

View File

@ -8,4 +8,5 @@ var AllowedDuplicateOptions = map[NormalizedOptionName]struct{}{
"listenaddress": {},
"match": {},
"port": {},
"hostkey": {},
}

View File

@ -18,6 +18,10 @@ func GetRootCompletions(
parentMatchBlock *ast.SSHDMatchBlock,
suggestValue bool,
) ([]protocol.CompletionItem, error) {
if d.Indexes == nil {
return nil, nil
}
kind := protocol.CompletionItemKindField
availableOptions := make(map[fields.NormalizedOptionName]docvalues.DocumentationValue, 0)

View File

@ -0,0 +1,20 @@
package handlers
import (
sshdconfig "config-lsp/handlers/sshd_config"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func FetchCodeActions(
d *sshdconfig.SSHDDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
if d.Indexes == nil {
return nil
}
actions := getKeywordTypoFixes(d, params)
return actions
}

View File

@ -0,0 +1,110 @@
package handlers
import (
"config-lsp/common"
sshdconfig "config-lsp/handlers/sshd_config"
"config-lsp/handlers/sshd_config/diagnostics"
"config-lsp/handlers/sshd_config/fields"
"fmt"
"github.com/hbollon/go-edlib"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func getKeywordTypoFixes(
d *sshdconfig.SSHDDocument,
params *protocol.CodeActionParams,
) []protocol.CodeAction {
if common.ServerOptions.NoTypoSuggestions {
return nil
}
line := params.Range.Start.Line
if typoOption, found := d.Indexes.UnknownOptions[line]; found {
name := typoOption.Option.Key.Value.Value
suggestedOptions := findSimilarOptions(name, typoOption.MatchBlock != nil)
actions := make([]protocol.CodeAction, 0, len(suggestedOptions))
kind := protocol.CodeActionKindQuickFix
for index, normalizedOptionName := range suggestedOptions {
isPreferred := index == 0
optionName := fields.FieldsNameFormattedMap[normalizedOptionName]
actions = append(actions, protocol.CodeAction{
Title: fmt.Sprintf("Typo Fix: %s", optionName),
IsPreferred: &isPreferred,
Kind: &kind,
Diagnostics: []protocol.Diagnostic{
diagnostics.GenerateUnknownOption(
typoOption.Option.Key.ToLSPRange(),
typoOption.Option.Key.Value.Value,
),
},
Edit: &protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
params.TextDocument.URI: {
{
Range: typoOption.Option.Key.ToLSPRange(),
NewText: optionName,
},
},
},
},
})
}
return actions
}
return nil
}
// Find options that are similar to the given option name.
// This is used to find typos & suggest the correct option name.
// Once an option is found that has a Damerau-Levenshtein distance of 1, it is immediately returned.
// Otherwise, candidates are collected while iterating: as soon as 2 options at distance 2 (or 3 options at distance 3) have been found, they are returned.
// If neither threshold is reached, all collected candidates with distance <= 3 are returned, which may be an empty slice.
func findSimilarOptions(
optionName string,
restrictToMatchOptions bool,
) []fields.NormalizedOptionName {
normalizedOptionName := string(fields.CreateNormalizedName(optionName))
optionsPerSimilarity := map[uint8][]fields.NormalizedOptionName{
2: make([]fields.NormalizedOptionName, 0, 2),
3: make([]fields.NormalizedOptionName, 0, 3),
}
for name := range fields.Options {
if restrictToMatchOptions {
if _, found := fields.MatchAllowedOptions[name]; !found {
continue
}
}
normalizedName := string(name)
similarity := edlib.DamerauLevenshteinDistance(normalizedName, normalizedOptionName)
switch similarity {
case 1:
return []fields.NormalizedOptionName{name}
case 2:
optionsPerSimilarity[2] = append(optionsPerSimilarity[2], name)
if len(optionsPerSimilarity[2]) >= 2 {
return optionsPerSimilarity[2]
}
case 3:
optionsPerSimilarity[3] = append(optionsPerSimilarity[3], name)
if len(optionsPerSimilarity[3]) >= 3 {
return optionsPerSimilarity[3]
}
}
}
return append(optionsPerSimilarity[2], optionsPerSimilarity[3]...)
}
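A minimal sketch (editorial, not part of the diff) of the distance thresholds described above, using the same go-edlib call as findSimilarOptions; the misspelled strings are hypothetical examples.

package main

import (
	"fmt"

	"github.com/hbollon/go-edlib"
)

func main() {
	// Distance 1: findSimilarOptions returns this single candidate immediately.
	fmt.Println(edlib.DamerauLevenshteinDistance("permitrootlogn", "permitrootlogin")) // 1
	// Distance 2: such candidates are collected until two of them exist.
	fmt.Println(edlib.DamerauLevenshteinDistance("adresfamily", "addressfamily")) // 2
}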

View File

@ -37,4 +37,6 @@ type SSHDIndexes struct {
AllOptionsPerName map[fields.NormalizedOptionName](map[*ast.SSHDMatchBlock]([]*ast.SSHDOption))
Includes map[uint32]*SSHDIndexIncludeLine
UnknownOptions map[uint32]ast.SSHDOptionInfo
}

View File

@ -18,6 +18,7 @@ func CreateIndexes(config ast.SSHDConfig) (*SSHDIndexes, []common.LSPError) {
indexes := &SSHDIndexes{
AllOptionsPerName: make(map[fields.NormalizedOptionName](map[*ast.SSHDMatchBlock]([]*ast.SSHDOption))),
Includes: make(map[uint32]*SSHDIndexIncludeLine),
UnknownOptions: make(map[uint32]ast.SSHDOptionInfo),
}
it := config.Options.Iterator()

View File

@ -0,0 +1,16 @@
package lsp
import (
sshdconfig "config-lsp/handlers/sshd_config"
"config-lsp/handlers/sshd_config/handlers"
"github.com/tliron/glsp"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func TextDocumentCodeAction(context *glsp.Context, params *protocol.CodeActionParams) ([]protocol.CodeAction, error) {
d := sshdconfig.DocumentParserMap[params.TextDocument.URI]
actions := handlers.FetchCodeActions(d, params)
return actions, nil
}

View File

@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Match

View File

@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser

View File

@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Match

View File

@ -1,4 +1,4 @@
// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.
package parser // Match

View File

@ -0,0 +1,51 @@
package analyzer
import (
"config-lsp/common"
"config-lsp/handlers/wireguard"
"config-lsp/handlers/wireguard/indexes"
protocol "github.com/tliron/glsp/protocol_3_16"
)
type analyzerContext struct {
document *wireguard.WGDocument
diagnostics []protocol.Diagnostic
}
func Analyze(
d *wireguard.WGDocument,
) []protocol.Diagnostic {
ctx := &analyzerContext{
document: d,
diagnostics: make([]protocol.Diagnostic, 0),
}
analyzeStructureIsValid(ctx)
if len(ctx.diagnostics) > 0 {
return ctx.diagnostics
}
i, indexErrors := indexes.CreateIndexes(d.Config)
if len(indexErrors) > 0 {
return common.ErrsToDiagnostics(indexErrors)
}
d.Indexes = i
analyzeProperties(ctx)
if len(ctx.diagnostics) > 0 {
return ctx.diagnostics
}
analyzeInterfaceSection(ctx)
analyzeDNSPropertyContainsFallback(ctx)
analyzeKeepAlivePropertyIsSet(ctx)
analyzeSymmetricPropertiesSet(ctx)
analyzeDuplicateAllowedIPs(ctx)
return ctx.diagnostics
}

View File

@ -1,7 +1,8 @@
package handlers
package analyzer
import (
"config-lsp/handlers/wireguard/parser"
"config-lsp/handlers/wireguard"
"config-lsp/handlers/wireguard/ast"
"config-lsp/utils"
"testing"
)
@ -14,12 +15,14 @@ PrivateKey = abc
[Interface]
PrivateKey = def
`)
p := parser.CreateWireguardParser()
p.ParseFromString(content)
d := &wireguard.WGDocument{
Config: ast.NewWGConfig(),
}
d.Config.Parse(content)
diagnostics := Analyze(p)
diagnostics := Analyze(d)
if len(diagnostics) == 0 {
if !(len(diagnostics) > 0) {
t.Errorf("Expected diagnostic errors, got %d", len(diagnostics))
}
}
@ -29,12 +32,14 @@ func TestInvalidValue(t *testing.T) {
[Interface]
DNS = nope
`)
p := parser.CreateWireguardParser()
p.ParseFromString(content)
d := &wireguard.WGDocument{
Config: ast.NewWGConfig(),
}
d.Config.Parse(content)
diagnostics := Analyze(p)
diagnostics := Analyze(d)
if len(diagnostics) == 0 {
if !(len(diagnostics) > 0) {
t.Errorf("Expected diagnostic errors, got %d", len(diagnostics))
}
}
@ -46,12 +51,15 @@ PrivateKey = abc
DNS = 1.1.1.1
PrivateKey = def
`)
p := parser.CreateWireguardParser()
p.ParseFromString(content)
diagnostics := Analyze(p)
d := &wireguard.WGDocument{
Config: ast.NewWGConfig(),
}
d.Config.Parse(content)
if len(diagnostics) == 0 {
diagnostics := Analyze(d)
if !(len(diagnostics) > 0) {
t.Errorf("Expected diagnostic errors, got %d", len(diagnostics))
}
}

View File

@ -0,0 +1,89 @@
package analyzer
import (
"config-lsp/common"
docvalues "config-lsp/doc-values"
"config-lsp/handlers/wireguard/ast"
"config-lsp/handlers/wireguard/diagnostics"
"config-lsp/handlers/wireguard/fields"
"config-lsp/handlers/wireguard/indexes"
"fmt"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func analyzeProperties(
ctx *analyzerContext,
) {
for _, section := range ctx.document.Config.Sections {
normalizedHeaderName := fields.CreateNormalizedName(section.Header.Name)
// Whether to check if the property is allowed in the section
checkAllowedProperty := true
existingProperties := make(map[fields.NormalizedName]*ast.WGProperty)
it := section.Properties.Iterator()
for it.Next() {
property := it.Value().(*ast.WGProperty)
normalizedPropertyName := fields.CreateNormalizedName(property.Key.Name)
if property.Key.Name == "" {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "This property is missing a name",
Range: property.Key.ToLSPRange(),
Severity: &common.SeverityError,
})
}
if property.Value == nil || property.Value.Value == "" {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "This property is missing a value",
Range: property.ToLSPRange(),
Severity: &common.SeverityError,
})
checkAllowedProperty = false
}
if checkAllowedProperty {
availableOptions := fields.OptionsHeaderMap[normalizedHeaderName]
// Duplicate check
if existingProperty, found := existingProperties[normalizedPropertyName]; found {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: fmt.Sprintf("Property '%s' has already been defined on line %d", property.Key.Name, existingProperty.Start.Line+1),
Severity: &common.SeverityError,
Range: existingProperty.ToLSPRange(),
})
// Check if value is valid
} else if option, found := availableOptions[normalizedPropertyName]; found {
invalidValues := option.DeprecatedCheckIsValid(property.Value.Value)
for _, invalidValue := range invalidValues {
err := docvalues.LSPErrorFromInvalidValue(property.Start.Line, *invalidValue).ShiftCharacter(property.Value.Start.Character)
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Range: err.Range.ToLSPRange(),
Message: err.Err.Error(),
Severity: &common.SeverityError,
})
}
// Unknown property
} else {
ctx.diagnostics = append(ctx.diagnostics,
diagnostics.GenerateUnknownOption(
property.ToLSPRange(),
property.Key.Name,
),
)
ctx.document.Indexes.UnknownProperties[property.Key.Start.Line] = indexes.WGIndexPropertyInfo{
Section: section,
Property: property,
}
}
existingProperties[normalizedPropertyName] = property
}
}
}
}

View File

@ -0,0 +1,152 @@
package analyzer
import (
"config-lsp/common"
"config-lsp/utils"
"context"
"fmt"
"net/netip"
"strings"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func analyzeDNSPropertyContainsFallback(
ctx *analyzerContext,
) {
sections, found := ctx.document.Indexes.SectionsByName["Interface"]
if !found {
return
}
interfaceSection := sections[0]
_, property := interfaceSection.FindFirstPropertyByName("DNS")
if property == nil {
return
}
dnsAmount := len(strings.Split(property.Value.Value, ","))
if dnsAmount == 1 {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "There is only one DNS server specified. It is recommended to set up fallback DNS servers",
Severity: &common.SeverityHint,
Range: property.Value.ToLSPRange(),
})
}
}
func analyzeKeepAlivePropertyIsSet(
ctx *analyzerContext,
) {
for _, section := range ctx.document.Indexes.SectionsByName["Peer"] {
// The PersistentKeepalive recommendation only applies when an Endpoint is set
_, endpoint := section.FindFirstPropertyByName("Endpoint")
_, persistentKeepAlive := section.FindFirstPropertyByName("PersistentKeepalive")
if endpoint != nil && persistentKeepAlive == nil {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "PersistentKeepalive is not set. It is recommended to set this property, as it helps to maintain the connection when users are behind NAT",
Severity: &common.SeverityHint,
Range: section.Header.ToLSPRange(),
})
}
}
}
func analyzeSymmetricPropertiesSet(
ctx *analyzerContext,
) {
for _, section := range ctx.document.Indexes.SectionsByName["Interface"] {
_, preUpProperty := section.FindFirstPropertyByName("PreUp")
_, preDownProperty := section.FindFirstPropertyByName("PreDown")
_, postUpProperty := section.FindFirstPropertyByName("PostUp")
_, postDownProperty := section.FindFirstPropertyByName("PostDown")
if preUpProperty != nil && preDownProperty == nil {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "PreUp is set, but PreDown is not. It is recommended to set both properties symmetrically",
Range: preUpProperty.ToLSPRange(),
Severity: &common.SeverityHint,
})
} else if preUpProperty == nil && preDownProperty != nil {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "PreDown is set, but PreUp is not. It is recommended to set both properties symmetrically",
Range: preDownProperty.ToLSPRange(),
Severity: &common.SeverityHint,
})
}
if postUpProperty != nil && postDownProperty == nil {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "PostUp is set, but PostDown is not. It is recommended to set both properties symmetrically",
Range: postUpProperty.ToLSPRange(),
Severity: &common.SeverityHint,
})
} else if postUpProperty == nil && postDownProperty != nil {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "PostDown is set, but PostUp is not. It is recommended to set both properties symmetrically",
Range: postDownProperty.ToLSPRange(),
Severity: &common.SeverityHint,
})
}
}
}
type key uint8
const (
lineKey key = iota
)
// Strategy
// Simply compare the host bits of the IP addresses.
// Use a binary tree to store the host bits.
func analyzeDuplicateAllowedIPs(
ctx *analyzerContext,
) {
ipHostSet := utils.CreateIPv4HostSet()
for _, section := range ctx.document.Indexes.SectionsByName["Peer"] {
_, property := section.FindFirstPropertyByName("AllowedIPs")
if property == nil {
continue
}
ipAddress, err := netip.ParsePrefix(property.Value.Value)
if err != nil {
// This should not happen...
continue
}
if ipContext, _ := ipHostSet.ContainsIP(ipAddress); ipContext != nil {
ctxx := *ipContext
definedLineRaw := ctxx.Value(lineKey)
definedLine := definedLineRaw.(uint32)
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: fmt.Sprintf("This IP range is already covered on line %d", definedLine+1),
Severity: &common.SeverityError,
Range: property.Value.ToLSPRange(),
})
} else {
ipContext := context.WithValue(
context.Background(),
lineKey,
property.Start.Line,
)
ipHostSet.AddIP(
ipAddress,
ipContext,
)
}
}
}
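A stdlib-only sketch (editorial, not part of the diff) of the overlap idea described in the strategy comment above; the real check goes through the project's CreateIPv4HostSet, and the sample prefixes are assumptions.

package main

import (
	"fmt"
	"net/netip"
)

func main() {
	first := netip.MustParsePrefix("10.0.0.0/24")  // AllowedIPs of an earlier peer
	second := netip.MustParsePrefix("10.0.0.5/32") // AllowedIPs of a later peer
	// The second range is already covered by the first, which is what
	// analyzeDuplicateAllowedIPs reports as a duplicate.
	fmt.Println(first.Overlaps(second)) // true
}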

View File

@ -0,0 +1,18 @@
package analyzer
import (
"config-lsp/common"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func analyzeInterfaceSection(ctx *analyzerContext) {
sections := ctx.document.Indexes.SectionsByName["Interface"]
if len(sections) > 1 {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "Only one [Interface] section is allowed",
Severity: &common.SeverityError,
Range: sections[1].Header.ToLSPRange(),
})
}
}

View File

@ -0,0 +1,41 @@
package analyzer
import (
"config-lsp/common"
"config-lsp/handlers/wireguard/fields"
"config-lsp/utils"
"fmt"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func analyzeStructureIsValid(ctx *analyzerContext) {
for _, section := range ctx.document.Config.Sections {
normalizedHeaderName := fields.CreateNormalizedName(section.Header.Name)
if section.Header.Name == "" {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "This section is missing a name",
Range: section.Header.ToLSPRange(),
Severity: &common.SeverityError,
})
} else if !utils.KeyExists(fields.OptionsHeaderMap, normalizedHeaderName) {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: fmt.Sprintf("Unknown section '%s'. It must be one of: [Interface], [Peer]", section.Header.Name),
Range: section.Header.ToLSPRange(),
Severity: &common.SeverityError,
})
}
if section.Properties.Size() == 0 {
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
Message: "This section is empty",
Range: section.Header.ToLSPRange(),
Severity: &common.SeverityInformation,
Tags: []protocol.DiagnosticTag{
protocol.DiagnosticTagUnnecessary,
},
})
}
}
}

View File

@ -0,0 +1,246 @@
package ast
import (
"config-lsp/common"
"config-lsp/utils"
"fmt"
"regexp"
"strings"
"github.com/emirpasic/gods/maps/treemap"
gods "github.com/emirpasic/gods/utils"
)
func NewWGConfig() *WGConfig {
config := &WGConfig{}
config.Clear()
return config
}
func (c *WGConfig) Clear() {
c.Sections = make([]*WGSection, 0, 2)
c.CommentLines = make(map[uint32]struct{})
}
var commentPattern = regexp.MustCompile(`^\s*([;#])`)
var emptyPattern = regexp.MustCompile(`^\s*$`)
var headerPattern = regexp.MustCompile(`^\s*\[(\w+)?]?`)
var linePattern = regexp.MustCompile(`^\s*(?P<key>.+?)\s*(?P<separator>=)\s*(?P<value>\S.*?)?\s*(?:[;#].*)?\s*$`)
func (c *WGConfig) Parse(input string) []common.LSPError {
errors := make([]common.LSPError, 0)
lines := utils.SplitIntoLines(input)
var currentSection *WGSection
for rawLineNumber, line := range lines {
lineNumber := uint32(rawLineNumber)
if emptyPattern.MatchString(line) {
// Set end of last section
if currentSection != nil {
currentSection.End.Line = lineNumber
currentSection.End.Character = 0
}
continue
}
if commentPattern.MatchString(line) {
c.CommentLines[lineNumber] = struct{}{}
// Set end of last section
if currentSection != nil {
currentSection.End.Line = lineNumber
currentSection.End.Character = uint32(len(line))
}
continue
}
if headerPattern.MatchString(line) {
name := headerPattern.FindStringSubmatch(line)[1]
currentSection = &WGSection{
LocationRange: common.LocationRange{
Start: common.Location{
Line: lineNumber,
Character: 0,
},
End: common.Location{
Line: lineNumber,
Character: uint32(len(line)) + 1,
},
},
Header: WGHeader{
LocationRange: common.LocationRange{
Start: common.Location{
Line: lineNumber,
Character: 0,
},
End: common.Location{
Line: lineNumber,
Character: uint32(len(line)) + 1,
},
},
Name: name,
},
Properties: treemap.NewWith(gods.UInt32Comparator),
}
c.Sections = append(c.Sections, currentSection)
continue
}
// Else property
// Set end of last section
if currentSection != nil {
currentSection.End.Line = lineNumber
currentSection.End.Character = uint32(len(line))
}
if currentSection == nil {
// Root properties are not allowed
errors = append(errors, common.LSPError{
Range: common.LocationRange{
Start: common.Location{
Line: lineNumber,
Character: 0,
},
End: common.Location{
Line: lineNumber,
Character: uint32(len(line)),
},
},
Err: fmt.Errorf("A header is missing before a property. This property has no header above it."),
})
continue
}
if !strings.Contains(line, "=") {
// Incomplete property
indexes := utils.GetTrimIndex(line)
newProperty := &WGProperty{
Key: WGPropertyKey{
LocationRange: common.LocationRange{
Start: common.Location{
Line: lineNumber,
Character: uint32(indexes[0]),
},
End: common.Location{
Line: lineNumber,
Character: uint32(indexes[1]),
},
},
Name: line[indexes[0]:indexes[1]],
},
}
currentSection.Properties.Put(lineNumber, newProperty)
} else {
// Fully written out property
indexes := linePattern.FindStringSubmatchIndex(line)
if len(indexes) == 0 {
// Error
errors = append(errors, common.LSPError{
Range: common.LocationRange{
Start: common.Location{
Line: lineNumber,
Character: 0,
},
End: common.Location{
Line: lineNumber,
Character: uint32(len(line)),
},
},
Err: fmt.Errorf("This property seems to be malformed"),
})
continue
}
// Construct key
keyStart := uint32(indexes[2])
keyEnd := uint32(indexes[3])
key := WGPropertyKey{
LocationRange: common.LocationRange{
Start: common.Location{
Line: lineNumber,
Character: keyStart,
},
End: common.Location{
Line: lineNumber,
Character: keyEnd,
},
},
Name: line[keyStart:keyEnd],
}
// Construct separator
separatorStart := uint32(indexes[4])
separatorEnd := uint32(indexes[5])
separator := WGPropertySeparator{
LocationRange: common.LocationRange{
Start: common.Location{
Line: lineNumber,
Character: separatorStart,
},
End: common.Location{
Line: lineNumber,
Character: separatorEnd,
},
},
}
// Construct value
var value *WGPropertyValue
propertyEnd := uint32(len(line))
if indexes[6] != -1 && indexes[7] != -1 {
// value exists
valueStart := uint32(indexes[6])
valueEnd := uint32(indexes[7])
propertyEnd = valueEnd
value = &WGPropertyValue{
LocationRange: common.LocationRange{
Start: common.Location{
Line: lineNumber,
Character: valueStart,
},
End: common.Location{
Line: lineNumber,
Character: valueEnd,
},
},
Value: line[valueStart:valueEnd],
}
}
// And lastly, add the property
newProperty := &WGProperty{
LocationRange: common.LocationRange{
Start: common.Location{
Line: lineNumber,
Character: keyStart,
},
End: common.Location{
Line: lineNumber,
Character: propertyEnd,
},
},
RawValue: line,
Key: key,
Separator: &separator,
Value: value,
}
currentSection.Properties.Put(lineNumber, newProperty)
}
}
return errors
}
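The linePattern above does the heavy lifting for fully written-out properties: it captures the key, the = separator, and an optional value while dropping trailing ; or # comments. A standalone sketch of how its capture groups behave (same regex literal, outside the parser):

package main

import (
	"fmt"
	"regexp"
)

var linePattern = regexp.MustCompile(`^\s*(?P<key>.+?)\s*(?P<separator>=)\s*(?P<value>\S.*?)?\s*(?:[;#].*)?\s*$`)

func main() {
	for _, line := range []string{
		"PrivateKey = 1234567890 # some comment",
		"Address=10.0.0.1",
		"DNS = ",
	} {
		groups := linePattern.FindStringSubmatch(line)
		if groups == nil {
			// Malformed line; the parser reports an error for this case.
			continue
		}
		// groups[1] = key, groups[2] = separator, groups[3] = value ("" when missing)
		fmt.Printf("key=%q separator=%q value=%q\n", groups[1], groups[2], groups[3])
	}
}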

View File

@ -0,0 +1,68 @@
package ast
import (
"config-lsp/utils"
"testing"
)
func TestExample1Works(
t *testing.T,
) {
sample := utils.Dedent(`
# A comment at the very top


[Interface]
PrivateKey = 1234567890 # Some comment
Address = 10.0.0.1



[Peer]
PublicKey = 1234567890

; I'm a comment
`)
config := NewWGConfig()
errors := config.Parse(sample)
if len(errors) > 0 {
t.Fatalf("Parse: Expected no errors, but got %v", errors)
}
if !(utils.KeyExists(config.CommentLines, 0) && utils.KeyExists(config.CommentLines, 12)) {
t.Errorf("Parse: Expected comments to be present on lines 0 and 12")
}
if !(config.Sections[0].Start.Line == 3 && config.Sections[0].End.Line == 8) {
t.Errorf("Parse: Expected section 0 to be present on lines 3 and 6, but it is: %v", config.Sections[0].End)
}
if !(config.Sections[0].Start.Character == 0 && config.Sections[0].End.Character == 0) {
t.Errorf("Parse: Expected section 0 to be present on characters 0 and 0, but it is: %v", config.Sections[0].End)
}
if !(config.Sections[0].Header.Name == "Interface" && config.Sections[1].Header.Name == "Peer") {
t.Errorf("Parse: Expected sections to be present on lines 0, 1, and 2")
}
rawFourthProperty, _ := config.Sections[0].Properties.Get(uint32(4))
fourthProperty := rawFourthProperty.(*WGProperty)
if !(fourthProperty.Key.Name == "PrivateKey" && fourthProperty.Value.Value == "1234567890") {
t.Errorf("Parse: Expected property line 4 to be correct")
}
rawFifthProperty, _ := config.Sections[0].Properties.Get(uint32(5))
fifthProperty := rawFifthProperty.(*WGProperty)
if !(fifthProperty.Key.Name == "Address" && fifthProperty.Value.Value == "10.0.0.1") {
t.Errorf("Parse: Expected property line 5 to be correct")
}
rawTenthProperty, _ := config.Sections[1].Properties.Get(uint32(10))
tenthProperty := rawTenthProperty.(*WGProperty)
if !(tenthProperty.Key.Name == "PublicKey" && tenthProperty.Value.Value == "1234567890") {
t.Errorf("Parse: Expected property line 10 to be correct")
}
}

View File

@ -0,0 +1,46 @@
package ast
import (
"config-lsp/common"
"github.com/emirpasic/gods/maps/treemap"
)
type WGPropertyKey struct {
common.LocationRange
Name string
}
type WGPropertyValue struct {
common.LocationRange
Value string
}
type WGPropertySeparator struct {
common.LocationRange
}
type WGProperty struct {
common.LocationRange
RawValue string
Key WGPropertyKey
Separator *WGPropertySeparator
Value *WGPropertyValue
}
type WGHeader struct {
common.LocationRange
Name string
}
type WGSection struct {
common.LocationRange
Header WGHeader
// [uint32]*WGProperty: line number -> *WGProperty
Properties *treemap.Map
}
type WGConfig struct {
Sections []*WGSection
// Used to identify where not to show diagnostics
CommentLines map[uint32]struct{}
}

View File

@ -0,0 +1,66 @@
package ast
import (
"slices"
)
func (c *WGConfig) FindSectionByLine(line uint32) *WGSection {
index, found := slices.BinarySearchFunc(
c.Sections,
line,
func(current *WGSection, target uint32) int {
if target < current.Start.Line {
return 1
}
if target > current.End.Line {
return -1
}
return 0
},
)
if !found {
return nil
}
return c.Sections[index]
}
func (c *WGConfig) FindPropertyByLine(line uint32) *WGProperty {
section := c.FindSectionByLine(line)
if section == nil {
return nil
}
if property, found := section.Properties.Get(line); found {
return property.(*WGProperty)
}
return nil
}
func (s *WGSection) FindFirstPropertyByName(name string) (uint32, *WGProperty) {
it := s.Properties.Iterator()
for it.Next() {
line := it.Key().(uint32)
property := it.Value().(*WGProperty)
if property.Key.Name == name {
return line, property
}
}
return 0, nil
}
func (s *WGSection) GetLastProperty() *WGProperty {
if s.Properties.Size() == 0 {
return nil
}
lastLine, _ := s.Properties.Max()
lastProperty, _ := s.Properties.Get(lastLine)
return lastProperty.(*WGProperty)
}
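A minimal usage sketch of the lookup helpers above, assuming the config-lsp module path as used by the analyzer imports:

package main

import (
	"fmt"

	"config-lsp/handlers/wireguard/ast"
)

func main() {
	config := ast.NewWGConfig()
	config.Parse("[Interface]\nAddress = 10.0.0.1\n")

	// Lines are zero-based: line 0 is the [Interface] header, line 1 the property.
	if property := config.FindPropertyByLine(1); property != nil {
		fmt.Println(property.Key.Name, "=", property.Value.Value)
	}

	section := config.FindSectionByLine(0)
	fmt.Println(section.Header.Name) // "Interface"
}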

View File

@ -0,0 +1,19 @@
package diagnostics
import (
"config-lsp/common"
"fmt"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func GenerateUnknownOption(
diagnosticRange protocol.Range,
propertyName string,
) protocol.Diagnostic {
return protocol.Diagnostic{
Range: diagnosticRange,
Message: fmt.Sprintf("Unknown property: %s", propertyName),
Severity: &common.SeverityError,
}
}

View File

@ -0,0 +1,9 @@
package fields
import "strings"
type NormalizedName string
func CreateNormalizedName(s string) NormalizedName {
return NormalizedName(strings.ToLower(s))
}

View File

@ -25,8 +25,8 @@ var maxPortValue = 65535
var minMTUValue = 68
var maxMTUValue = 1500
var InterfaceOptions = map[string]docvalues.DocumentationValue{
"Address": {
var InterfaceOptions = map[NormalizedName]docvalues.DocumentationValue{
"address": {
Documentation: `Defines what address range the local node should route traffic for. Depending on whether the node is a simple client joining the VPN subnet, or a bounce server that's relaying traffic between multiple clients, this can be set to a single IP of the node itself (specified with CIDR notation, e.g. 192.0.2.3/32), or a range of IPv4/IPv6 subnets that the node can route traffic for.
## Examples
@ -49,7 +49,7 @@ You can also specify multiple subnets or IPv6 subnets like so:
AllowRange: true,
},
},
"ListenPort": {
"listenport": {
Documentation: `When the node is acting as a public bounce server, it should hardcode a port to listen for incoming VPN connections from the public internet. Clients not acting as relays should not set this value. If not specified, chosen randomly.
## Examples
@ -66,14 +66,14 @@ Using custom WireGuard port
Max: &maxPortValue,
},
},
"PrivateKey": {
"privatekey": {
Documentation: `This is the private key for the local node, never shared with other servers. All nodes must have a private key set, regardless of whether they are public bounce servers relaying traffic, or simple clients joining the VPN.
This key can be generated with [wg genkey > example.key]
`,
Value: docvalues.StringValue{},
},
"DNS": {
"dns": {
Documentation: `The DNS server(s) to announce to VPN clients via DHCP. Most clients will use this server for DNS requests over the VPN, but clients can also override this value locally on their nodes
The value can be left unconfigured to use the system's default DNS servers
@ -97,7 +97,7 @@ or multiple DNS servers can be provided
},
},
},
"Table": {
"table": {
Documentation: `Optionally defines which routing table to use for the WireGuard routes, not necessary to configure for most setups.
There are two special values: off disables the creation of routes altogether, and auto (the default) adds routes to the default table and enables special handling of default routes.
@ -127,7 +127,7 @@ https://git.zx2c4.com/WireGuard/about/src/tools/man/wg-quick.8
},
},
},
"MTU": {
"mtu": {
Documentation: `Optionally defines the maximum transmission unit (MTU, aka packet/frame size) to use when connecting to the peer, not necessary to configure for most setups.
The MTU is automatically determined from the endpoint addresses or the system default route, which is usually a sane choice.
@ -142,7 +142,7 @@ https://git.zx2c4.com/WireGuard/about/src/tools/man/wg-quick.8
Max: &maxMTUValue,
},
},
"PreUp": {
"preup": {
Documentation: `Optionally run a command before the interface is brought up. This option can be specified multiple times, with commands executed in the order they appear in the file.
## Examples
@ -152,7 +152,7 @@ Add an IP route
PreUp = ip rule add ipproto tcp dport 22 table 1234
`, Value: docvalues.StringValue{},
},
"PostUp": {
"postup": {
Documentation: `Optionally run a command after the interface is brought up. This option can appear multiple times, as with PreUp
## Examples
@ -182,7 +182,7 @@ Force WireGuard to re-resolve IP address for peer domain
`,
Value: docvalues.StringValue{},
},
"PreDown": {
"predown": {
Documentation: `Optionally run a command before the interface is brought down. This option can appear multiple times, as with PreUp
## Examples
@ -196,7 +196,7 @@ Hit a webhook on another server
`,
Value: docvalues.StringValue{},
},
"PostDown": {
"postdown": {
Documentation: `Optionally run a command after the interface is brought down. This option can appear multiple times, as with PreUp
## Examples
@ -215,21 +215,21 @@ Remove the iptables rule that forwards packets on the WireGuard interface
`,
Value: docvalues.StringValue{},
},
"FwMark": {
"fwmark": {
Documentation: "a 32-bit fwmark for outgoing packets. If set to 0 or \"off\", this option is disabled. May be specified in hexadecimal by prepending \"0x\". Optional",
Value: docvalues.StringValue{},
},
}
var InterfaceAllowedDuplicateFields = map[string]struct{}{
"PreUp": {},
"PostUp": {},
"PreDown": {},
"PostDown": {},
var InterfaceAllowedDuplicateFields = map[NormalizedName]struct{}{
"preup": {},
"postup": {},
"predown": {},
"postdown": {},
}
var PeerOptions = map[string]docvalues.DocumentationValue{
"Endpoint": {
var PeerOptions = map[NormalizedName]docvalues.DocumentationValue{
"endpoint": {
Documentation: `Defines the publicly accessible address for a remote peer. This should be left out for peers behind a NAT or peers that don't have a stable publicly accessible IP:PORT pair. Typically, this only needs to be defined on the main bounce server, but it can also be defined on other public nodes with stable IPs like public-server2 in the example config below.
## Examples
@ -243,7 +243,7 @@ Endpoint is a hostname/FQDN
`,
Value: docvalues.StringValue{},
},
"AllowedIPs": {
"allowedips": {
Documentation: `This defines the IP ranges for which a peer will route traffic. On simple clients, this is usually a single address (the VPN address of the simple client itself). For bounce servers this will be a range of the IPs or subnets that the relay server is capable of routing traffic for. Multiple IPs and subnets may be specified using comma-separated IPv4 or IPv6 CIDR notation (from a single /32 or /128 address, all the way up to 0.0.0.0/0 and ::/0 to indicate a default route to send all internet and VPN traffic through that peer). This option may be specified multiple times.
When deciding how to route a packet, the system chooses the most specific route first, and falls back to broader routes. So for a packet destined to 192.0.2.3, the system would first look for a peer advertising 192.0.2.3/32 specifically, and would fall back to a peer advertising 192.0.2.1/24 or a larger range like 0.0.0.0/0 as a last resort.
@ -280,7 +280,7 @@ Peer is a relay server that routes to itself and all nodes on its local LAN
},
},
},
"PublicKey": {
"publickey": {
Documentation: `This is the public key for the remote node, shareable with all peers. All nodes must have a public key set, regardless of whether they are public bounce servers relaying traffic, or simple clients joining the VPN.
This key can be generated with wg pubkey < example.key > example.key.pub. (see above for how to generate the private key example.key)
@ -291,7 +291,7 @@ This key can be generated with wg pubkey < example.key > example.key.pub. (see a
`,
Value: docvalues.StringValue{},
},
"PersistentKeepalive": {
"persistentkeepalive": {
Documentation: `If the connection is going from a NAT-ed peer to a public peer, the node behind the NAT must regularly send an outgoing ping in order to keep the bidirectional connection alive in the NAT router's connection table.
## Examples
@ -310,17 +310,17 @@ Local NAT-ed node to remote public node
`,
Value: docvalues.PositiveNumberValue(),
},
"PresharedKey": {
"presharedkey": {
Documentation: "Optionally defines a pre-shared key for the peer, used to authenticate the connection. This is not necessary, but strongly recommended for security.",
Value: docvalues.StringValue{},
},
}
var PeerAllowedDuplicateFields = map[string]struct{}{
"AllowedIPs": {},
var PeerAllowedDuplicateFields = map[NormalizedName]struct{}{
"allowedips": {},
}
var OptionsHeaderMap = map[string](map[string]docvalues.DocumentationValue){
"Interface": InterfaceOptions,
"Peer": PeerOptions,
var OptionsHeaderMap = map[NormalizedName](map[NormalizedName]docvalues.DocumentationValue){
"interface": InterfaceOptions,
"peer": PeerOptions,
}
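Since the maps are now keyed by NormalizedName, lookups are case-insensitive as long as callers go through CreateNormalizedName. A rough usage sketch, again assuming the repo's fields import path:

package main

import (
	"fmt"

	"config-lsp/handlers/wireguard/fields"
)

func main() {
	// "ListenPort", "listenport" and "LISTENPORT" all normalize to the same key.
	name := fields.CreateNormalizedName("ListenPort")

	if option, found := fields.InterfaceOptions[name]; found {
		fmt.Println(option.Documentation)
	}

	// Section headers are resolved the same way.
	_, isKnownSection := fields.OptionsHeaderMap[fields.CreateNormalizedName("Peer")]
	fmt.Println(isKnownSection) // true
}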

View File

@ -0,0 +1,23 @@
package fields
var AllOptionsFormatted = map[NormalizedName]string{
// Interface
"address": "Address",
"listenport": "ListenPort",
"privatekey": "PrivateKey",
"dns": "DNS",
"table": "Table",
"mtu": "MTU",
"preup": "PreUp",
"postup": "PostUp",
"predown": "Predown",
"postdown": "PostDown",
"fwmark": "FwMark",
// Peer Options
"endpoint": "Endpoint",
"allowedips": "AllowedIPs",
"publickey": "PublicKey",
"persistentkeepalive": "PersistentKeepalive",
"presharedkey": "PresharedKey",
}

View File

@ -1,452 +0,0 @@
package handlers
import (
docvalues "config-lsp/doc-values"
"config-lsp/handlers/wireguard/fields"
"config-lsp/handlers/wireguard/parser"
"config-lsp/utils"
"context"
"fmt"
"net/netip"
"slices"
"strings"
protocol "github.com/tliron/glsp/protocol_3_16"
)
func Analyze(
p parser.WireguardParser,
) []protocol.Diagnostic {
sectionsErrors := analyzeSections(p.Sections)
sectionsErrors = append(sectionsErrors, analyzeOnlyOneInterfaceSectionSpecified(p)...)
if len(sectionsErrors) > 0 {
return sectionsErrors
}
validCheckErrors := checkIfValuesAreValid(p.Sections)
if len(validCheckErrors) > 0 {
return validCheckErrors
}
diagnostics := make([]protocol.Diagnostic, 0)
diagnostics = append(diagnostics, analyzeParserForDuplicateProperties(p)...)
diagnostics = append(diagnostics, analyzeDNSContainsFallback(p)...)
diagnostics = append(diagnostics, analyzeKeepAliveIsSet(p)...)
diagnostics = append(diagnostics, analyzeSymmetricPropertiesExist(p)...)
diagnostics = append(diagnostics, analyzeDuplicateAllowedIPs(p)...)
return diagnostics
}
func analyzeSections(
sections []*parser.WireguardSection,
) []protocol.Diagnostic {
var diagnostics []protocol.Diagnostic
for _, section := range sections {
sectionDiagnostics := analyzeSection(*section)
if len(sectionDiagnostics) > 0 {
diagnostics = append(diagnostics, sectionDiagnostics...)
}
}
if len(diagnostics) > 0 {
return diagnostics
}
return diagnostics
}
func analyzeOnlyOneInterfaceSectionSpecified(
p parser.WireguardParser,
) []protocol.Diagnostic {
var diagnostics []protocol.Diagnostic
alreadyFound := false
for _, section := range p.GetSectionsByName("Interface") {
if alreadyFound {
severity := protocol.DiagnosticSeverityError
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: "Only one [Interface] section is allowed",
Severity: &severity,
Range: section.GetHeaderLineRange(),
})
}
alreadyFound = true
}
return diagnostics
}
func analyzeDNSContainsFallback(
p parser.WireguardParser,
) []protocol.Diagnostic {
lineNumber, property := p.FindFirstPropertyByName("DNS")
if property == nil {
return []protocol.Diagnostic{}
}
dnsAmount := len(strings.Split(property.Value.Value, ","))
if dnsAmount == 1 {
severity := protocol.DiagnosticSeverityHint
return []protocol.Diagnostic{
{
Message: "There is only one DNS server specified. It is recommended to set up fallback DNS servers",
Severity: &severity,
Range: protocol.Range{
Start: protocol.Position{
Line: *lineNumber,
Character: property.Value.Location.Start,
},
End: protocol.Position{
Line: *lineNumber,
Character: property.Value.Location.End,
},
},
},
}
}
return []protocol.Diagnostic{}
}
func analyzeKeepAliveIsSet(
p parser.WireguardParser,
) []protocol.Diagnostic {
var diagnostics []protocol.Diagnostic
for _, section := range p.GetSectionsByName("Peer") {
// If an endpoint is set, then we should only check for the keepalive property
if section.ExistsProperty("Endpoint") && !section.ExistsProperty("PersistentKeepalive") {
severity := protocol.DiagnosticSeverityHint
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: "PersistentKeepalive is not set. It is recommended to set this property, as it helps to maintain the connection when users are behind NAT",
Severity: &severity,
Range: section.GetRange(),
})
}
}
return diagnostics
}
// Check if the values are valid.
// Assumes that sections have been analyzed already.
func checkIfValuesAreValid(
sections []*parser.WireguardSection,
) []protocol.Diagnostic {
var diagnostics []protocol.Diagnostic
for _, section := range sections {
for lineNumber, property := range section.Properties {
diagnostics = append(
diagnostics,
analyzeProperty(property, section, lineNumber)...,
)
}
}
return diagnostics
}
func analyzeSection(
s parser.WireguardSection,
) []protocol.Diagnostic {
var diagnostics []protocol.Diagnostic
if s.Name == nil {
// No section name
severity := protocol.DiagnosticSeverityError
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: "This section is missing a name",
Severity: &severity,
Range: s.GetRange(),
})
return diagnostics
}
if _, found := fields.OptionsHeaderMap[*s.Name]; !found {
// Unknown section
severity := protocol.DiagnosticSeverityError
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: fmt.Sprintf("Unknown section '%s'. It must be one of: [Interface], [Peer]", *s.Name),
Severity: &severity,
Range: s.GetHeaderLineRange(),
})
return diagnostics
}
return diagnostics
}
// Check if the property is valid.
// Returns a list of diagnostics.
// `belongingSection` is the section to which the property belongs. This value is
// expected to be non-nil and expected to be a valid Wireguard section.
func analyzeProperty(
p parser.WireguardProperty,
belongingSection *parser.WireguardSection,
propertyLine uint32,
) []protocol.Diagnostic {
sectionOptions := fields.OptionsHeaderMap[*belongingSection.Name]
option, found := sectionOptions[p.Key.Name]
if !found {
// Unknown property
severity := protocol.DiagnosticSeverityError
return []protocol.Diagnostic{
{
Message: fmt.Sprintf("Unknown property '%s'", p.Key.Name),
Severity: &severity,
Range: protocol.Range{
Start: protocol.Position{
Line: propertyLine,
Character: p.Key.Location.Start,
},
End: protocol.Position{
Line: propertyLine,
Character: p.Key.Location.End,
},
},
},
}
}
if p.Value == nil {
// No value to check
severity := protocol.DiagnosticSeverityWarning
return []protocol.Diagnostic{
{
Message: "Property is missing a value",
Severity: &severity,
Range: p.GetLineRange(propertyLine),
},
}
}
errors := option.DeprecatedCheckIsValid(p.Value.Value)
return utils.Map(errors, func(err *docvalues.InvalidValue) protocol.Diagnostic {
severity := protocol.DiagnosticSeverityError
return protocol.Diagnostic{
Message: err.GetMessage(),
Severity: &severity,
Range: protocol.Range{
Start: protocol.Position{
Line: propertyLine,
Character: p.Value.Location.Start + err.Start,
},
End: protocol.Position{
Line: propertyLine,
Character: p.Value.Location.Start + err.End,
},
},
}
})
}
func analyzeParserForDuplicateProperties(
p parser.WireguardParser,
) []protocol.Diagnostic {
diagnostics := make([]protocol.Diagnostic, 0)
for _, section := range p.Sections {
diagnostics = append(diagnostics, analyzeDuplicateProperties(*section)...)
}
return diagnostics
}
func analyzeDuplicateProperties(
s parser.WireguardSection,
) []protocol.Diagnostic {
var diagnostics []protocol.Diagnostic
existingProperties := make(map[string]uint32)
lines := utils.KeysOfMap(s.Properties)
slices.Sort(lines)
for _, currentLineNumber := range lines {
property := s.Properties[currentLineNumber]
var skipCheck = false
if s.Name != nil {
switch *s.Name {
case "Interface":
if _, found := fields.InterfaceAllowedDuplicateFields[property.Key.Name]; found {
skipCheck = true
}
case "Peer":
if _, found := fields.PeerAllowedDuplicateFields[property.Key.Name]; found {
skipCheck = true
}
}
}
if skipCheck {
continue
}
if existingLineNumber, found := existingProperties[property.Key.Name]; found {
severity := protocol.DiagnosticSeverityError
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: fmt.Sprintf("Property '%s' is already defined on line %d", property.Key.Name, existingLineNumber+1),
Severity: &severity,
Range: protocol.Range{
Start: protocol.Position{
Line: currentLineNumber,
Character: 0,
},
End: protocol.Position{
Line: currentLineNumber,
Character: 99999,
},
},
})
} else {
existingProperties[property.Key.Name] = currentLineNumber
}
}
return diagnostics
}
type propertyWithLine struct {
Line uint32
Property parser.WireguardProperty
IpPrefix netip.Prefix
}
func mapAllowedIPsToMasks(p parser.WireguardParser) map[uint8][]propertyWithLine {
ips := make(map[uint8][]propertyWithLine)
for _, section := range p.GetSectionsByName("Peer") {
for lineNumber, property := range section.Properties {
if property.Key.Name == "AllowedIPs" {
ipAddress, err := netip.ParsePrefix(property.Value.Value)
if err != nil {
// This should not happen...
continue
}
hostBits := uint8(ipAddress.Bits())
if _, found := ips[uint8(hostBits)]; !found {
ips[hostBits] = make([]propertyWithLine, 0)
}
ips[hostBits] = append(ips[hostBits], propertyWithLine{
Line: uint32(lineNumber),
Property: property,
IpPrefix: ipAddress,
})
}
}
}
return ips
}
// Strategy
// Simply compare the host bits of the IP addresses.
// Use a binary tree to store the host bits.
func analyzeDuplicateAllowedIPs(p parser.WireguardParser) []protocol.Diagnostic {
diagnostics := make([]protocol.Diagnostic, 0)
maskedIPs := mapAllowedIPsToMasks(p)
hostBits := utils.KeysOfMap(maskedIPs)
slices.Sort(hostBits)
ipHostSet := utils.CreateIPv4HostSet()
for _, hostBit := range hostBits {
ips := maskedIPs[hostBit]
for _, ipInfo := range ips {
if ctx, _ := ipHostSet.ContainsIP(ipInfo.IpPrefix); ctx != nil {
severity := protocol.DiagnosticSeverityError
definedLine := (*ctx).Value("line").(uint32)
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: fmt.Sprintf("This IP range is already covered on line %d", definedLine),
Severity: &severity,
Range: protocol.Range{
Start: protocol.Position{
Line: ipInfo.Line,
Character: ipInfo.Property.Key.Location.Start,
},
End: protocol.Position{
Line: ipInfo.Line,
Character: ipInfo.Property.Value.Location.End,
},
},
})
} else {
humanLineNumber := ipInfo.Line + 1
ctx := context.WithValue(context.Background(), "line", humanLineNumber)
ipHostSet.AddIP(
ipInfo.IpPrefix,
ctx,
)
}
}
}
return diagnostics
}
func analyzeSymmetricPropertiesExist(
p parser.WireguardParser,
) []protocol.Diagnostic {
diagnostics := make([]protocol.Diagnostic, 0, 4)
severity := protocol.DiagnosticSeverityHint
for _, section := range p.GetSectionsByName("Interface") {
preUpLine, preUpProperty := section.FetchFirstProperty("PreUp")
preDownLine, preDownProperty := section.FetchFirstProperty("PreDown")
postUpLine, postUpProperty := section.FetchFirstProperty("PostUp")
postDownLine, postDownProperty := section.FetchFirstProperty("PostDown")
if preUpProperty != nil && preDownProperty == nil {
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: "PreUp is set, but PreDown is not. It is recommended to set both properties symmetrically",
Range: preUpProperty.GetLineRange(*preUpLine),
Severity: &severity,
})
} else if preUpProperty == nil && preDownProperty != nil {
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: "PreDown is set, but PreUp is not. It is recommended to set both properties symmetrically",
Range: preDownProperty.GetLineRange(*preDownLine),
Severity: &severity,
})
}
if postUpProperty != nil && postDownProperty == nil {
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: "PostUp is set, but PostDown is not. It is recommended to set both properties symmetrically",
Range: postUpProperty.GetLineRange(*postUpLine),
Severity: &severity,
})
} else if postUpProperty == nil && postDownProperty != nil {
diagnostics = append(diagnostics, protocol.Diagnostic{
Message: "PostDown is set, but PostUp is not. It is recommended to set both properties symmetrically",
Range: postDownProperty.GetLineRange(*postDownLine),
Severity: &severity,
})
}
}
return diagnostics
}

View File

@ -1,8 +1,8 @@
package handlers
import (
wgcommands "config-lsp/handlers/wireguard/commands"
"config-lsp/handlers/wireguard/parser"
"config-lsp/handlers/wireguard/ast"
protocol "github.com/tliron/glsp/protocol_3_16"
)
@ -11,133 +11,11 @@ type CodeActionName string
const (
CodeActionGeneratePrivateKey CodeActionName = "generatePrivateKey"
CodeActionGeneratePresharedKey CodeActionName = "generatePresharedKey"
CodeActionAddKeepalive CodeActionName = "addKeepalive"
CodeActionCreatePeer CodeActionName = "createPeer"
)
type CodeAction interface {
RunCommand(*parser.WireguardParser) (*protocol.ApplyWorkspaceEditParams, error)
RunCommand(*ast.WGConfig) (*protocol.ApplyWorkspaceEditParams, error)
}
type CodeActionArgs interface{}
type CodeActionGeneratePrivateKeyArgs struct {
URI protocol.DocumentUri
Line uint32
}
func CodeActionGeneratePrivateKeyArgsFromArguments(arguments map[string]any) CodeActionGeneratePrivateKeyArgs {
return CodeActionGeneratePrivateKeyArgs{
URI: arguments["URI"].(protocol.DocumentUri),
Line: uint32(arguments["Line"].(float64)),
}
}
func (args CodeActionGeneratePrivateKeyArgs) RunCommand(p *parser.WireguardParser) (*protocol.ApplyWorkspaceEditParams, error) {
privateKey, err := wgcommands.CreateNewPrivateKey()
if err != nil {
return &protocol.ApplyWorkspaceEditParams{}, err
}
section, property := p.GetPropertyByLine(args.Line)
if section == nil || property == nil {
return nil, nil
}
label := "Generate Private Key"
return &protocol.ApplyWorkspaceEditParams{
Label: &label,
Edit: protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
args.URI: {
{
NewText: " " + privateKey,
Range: property.GetInsertRange(args.Line),
},
},
},
},
}, nil
}
type CodeActionGeneratePresharedKeyArgs struct {
URI protocol.DocumentUri
Line uint32
}
func CodeActionGeneratePresharedKeyArgsFromArguments(arguments map[string]any) CodeActionGeneratePresharedKeyArgs {
return CodeActionGeneratePresharedKeyArgs{
URI: arguments["URI"].(protocol.DocumentUri),
Line: uint32(arguments["Line"].(float64)),
}
}
func (args CodeActionGeneratePresharedKeyArgs) RunCommand(p *parser.WireguardParser) (*protocol.ApplyWorkspaceEditParams, error) {
presharedKey, err := wgcommands.CreatePresharedKey()
if err != nil {
return &protocol.ApplyWorkspaceEditParams{}, err
}
section, property := p.GetPropertyByLine(args.Line)
if section == nil || property == nil {
return nil, nil
}
label := "Generate Preshared Key"
return &protocol.ApplyWorkspaceEditParams{
Label: &label,
Edit: protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
args.URI: {
{
NewText: " " + presharedKey,
Range: property.GetInsertRange(args.Line),
},
},
},
},
}, nil
}
type CodeActionAddKeepaliveArgs struct {
URI protocol.DocumentUri
SectionIndex uint32
}
func CodeActionAddKeepaliveArgsFromArguments(arguments map[string]any) CodeActionAddKeepaliveArgs {
return CodeActionAddKeepaliveArgs{
URI: arguments["URI"].(protocol.DocumentUri),
SectionIndex: uint32(arguments["SectionIndex"].(float64)),
}
}
func (args CodeActionAddKeepaliveArgs) RunCommand(p *parser.WireguardParser) (*protocol.ApplyWorkspaceEditParams, error) {
section := p.Sections[args.SectionIndex]
label := "Add PersistentKeepalive"
return &protocol.ApplyWorkspaceEditParams{
Label: &label,
Edit: protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
args.URI: {
{
NewText: "PersistentKeepalive = 25\n",
Range: protocol.Range{
Start: protocol.Position{
Line: section.EndLine + 1,
Character: 0,
},
End: protocol.Position{
Line: section.EndLine + 1,
Character: 0,
},
},
},
},
},
},
}, nil
}

View File

@ -0,0 +1,191 @@
package handlers
import (
"config-lsp/common"
"config-lsp/handlers/wireguard"
"config-lsp/handlers/wireguard/ast"
wgcommands "config-lsp/handlers/wireguard/commands"
"fmt"
"net"
protocol "github.com/tliron/glsp/protocol_3_16"
)
type CodeActionCreatePeerArgs struct {
URI protocol.DocumentUri
Line uint32
}
func CodeActionCreatePeerArgsFromArguments(arguments map[string]any) CodeActionCreatePeerArgs {
return CodeActionCreatePeerArgs{
URI: arguments["URI"].(protocol.DocumentUri),
Line: uint32(arguments["Line"].(float64)),
}
}
func (args CodeActionCreatePeerArgs) RunCommand(d *wireguard.WGDocument) (*protocol.ApplyWorkspaceEditParams, error) {
interfaceSection := d.Indexes.SectionsByName["Interface"][0]
section := d.Config.FindSectionByLine(args.Line)
label := fmt.Sprintf("Add Peer based on peer on line %d", args.Line)
newSection := section
// IP Address
ipAddressLine, ipAddress := newSection.FindFirstPropertyByName("AllowedIPs")
_, address := interfaceSection.FindFirstPropertyByName("Address")
if ipAddress != nil && address != nil {
_, network, err := net.ParseCIDR(address.Value.Value)
if err == nil {
newIPAddress := createNewIP(*network, ipAddress.Value.Value)
valueEnd := common.Location{
Line: ipAddress.End.Line,
Character: ipAddress.Value.Start.Character + uint32(len(newIPAddress)) + 1,
}
newSection.Properties.Put(
ipAddressLine,
&ast.WGProperty{
LocationRange: common.LocationRange{
Start: ipAddress.Start,
End: valueEnd,
},
Key: ipAddress.Key,
RawValue: newIPAddress,
Separator: address.Separator,
Value: &ast.WGPropertyValue{
LocationRange: common.LocationRange{
Start: ipAddress.Value.Start,
End: valueEnd,
},
Value: newIPAddress,
},
},
)
}
}
// Preshared Key
presharedKeyLine, presharedKey := newSection.FindFirstPropertyByName("PresharedKey")
if presharedKey != nil {
var newKey string
if wgcommands.AreWireguardToolsAvailable() {
createdKey, err := wgcommands.CreatePresharedKey()
if err == nil {
newKey = createdKey
}
} else {
newKey = "[preshared key]"
}
valueEnd := common.Location{
Line: presharedKey.End.Line,
Character: presharedKey.Value.Start.Character + uint32(len(newKey)) + 1,
}
newSection.Properties.Put(
presharedKeyLine,
&ast.WGProperty{
LocationRange: common.LocationRange{
Start: presharedKey.Start,
End: valueEnd,
},
Key: presharedKey.Key,
RawValue: newKey,
Separator: presharedKey.Separator,
Value: &ast.WGPropertyValue{
LocationRange: common.LocationRange{
Start: presharedKey.Value.Start,
End: valueEnd,
},
Value: newKey,
},
},
)
}
lastProperty := newSection.GetLastProperty()
println("last line")
println(lastProperty.End.Line)
println(fmt.Sprintf("~~~%s~~~", writeSection(*newSection)))
newText := writeSection(*newSection)
return &protocol.ApplyWorkspaceEditParams{
Label: &label,
Edit: protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
args.URI: {
{
Range: protocol.Range{
Start: protocol.Position{
Line: lastProperty.End.Line,
Character: lastProperty.End.Character,
},
End: protocol.Position{
Line: lastProperty.End.Line,
Character: lastProperty.End.Character,
},
},
NewText: newText,
},
},
},
},
}, nil
}
func writeSection(section ast.WGSection) string {
text := "\n\n"
text += fmt.Sprintf("[%s]\n", section.Header.Name)
it := section.Properties.Iterator()
for it.Next() {
property := it.Value().(*ast.WGProperty)
text += fmt.Sprintf("%s = %s\n", property.Key.Name, property.Value.Value)
}
return text
}
// Try incrementing the IP address
func createNewIP(
network net.IPNet,
rawIP string,
) string {
parsedIP, _, err := net.ParseCIDR(rawIP)
if err != nil {
return ""
}
parsedIP = parsedIP.To4()
if parsedIP == nil {
// IPv6 is not supported
return ""
}
lastAddress := uint32(network.IP[0])<<24 | uint32(network.IP[1])<<16 | uint32(network.IP[2])<<8 | uint32(network.IP[3])
networkMask, _ := network.Mask.Size()
for index := range 32 - networkMask {
lastAddress |= 1 << index
}
newIP := uint32(parsedIP[0])<<24 | uint32(parsedIP[1])<<16 | uint32(parsedIP[2])<<8 | uint32(parsedIP[3])
newIP += 1
if newIP >= lastAddress || newIP == 0 {
// The IP is the last one in the network, which can't be used,
// or, even worse, the increment overflowed
return ""
}
// Here, we successfully incremented the IP correctly
// Let's return the formatted IP now.
return fmt.Sprintf("%d.%d.%d.%d/32", newIP>>24, newIP>>16&0xFF, newIP>>8&0xFF, newIP&0xFF)
}

View File

@ -0,0 +1,42 @@
package handlers
import (
"net"
"testing"
)
func TestCreateNewIPSimple24Mask(t *testing.T) {
_, network, _ := net.ParseCIDR("10.0.0.0/24")
newIP := createNewIP(*network, "10.0.0.1/32")
if newIP != "10.0.0.2/32" {
t.Errorf("Expected 10.0.0.2/32, got %s", newIP)
}
}
func TestCreateNewIPDoesNotWorkWithLast24Mask(t *testing.T) {
_, network, _ := net.ParseCIDR("10.0.0.0/24")
newIP := createNewIP(*network, "10.0.0.254/32")
if newIP != "" {
t.Errorf("Expected empty string, got %s", newIP)
}
}
func TestCreateNewIPDoesNotWorkWithLast24Mask2(t *testing.T) {
_, network, _ := net.ParseCIDR("10.0.0.0/24")
newIP := createNewIP(*network, "10.0.0.255/32")
if newIP != "" {
t.Errorf("Expected empty string, got %s", newIP)
}
}
func TestCreateNewIPComplex20Mask(t *testing.T) {
_, network, _ := net.ParseCIDR("10.0.0.0/20")
newIP := createNewIP(*network, "10.0.0.255/32")
if newIP != "10.0.1.0/32" {
t.Errorf("Expected 10.0.1.0/32, got %s", newIP)
}
}

View File

@ -0,0 +1,59 @@
package handlers
import (
"config-lsp/handlers/wireguard"
wgcommands "config-lsp/handlers/wireguard/commands"
protocol "github.com/tliron/glsp/protocol_3_16"
)
type CodeActionGeneratePrivateKeyArgs struct {
URI protocol.DocumentUri
Line uint32
}
func CodeActionGeneratePrivateKeyArgsFromArguments(arguments map[string]any) CodeActionGeneratePrivateKeyArgs {
return CodeActionGeneratePrivateKeyArgs{
URI: arguments["URI"].(protocol.DocumentUri),
Line: uint32(arguments["Line"].(float64)),
}
}
func (args CodeActionGeneratePrivateKeyArgs) RunCommand(d *wireguard.WGDocument) (*protocol.ApplyWorkspaceEditParams, error) {
privateKey, err := wgcommands.CreateNewPrivateKey()
if err != nil {
return &protocol.ApplyWorkspaceEditParams{}, err
}
section := d.Config.FindSectionByLine(args.Line)
property := d.Config.FindPropertyByLine(args.Line)
if section == nil || property == nil {
return nil, nil
}
label := "Generate Private Key"
return &protocol.ApplyWorkspaceEditParams{
Label: &label,
Edit: protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
args.URI: {
{
NewText: " " + privateKey,
Range: protocol.Range{
Start: protocol.Position{
Line: property.End.Line,
Character: property.End.Character,
},
End: protocol.Position{
Line: property.End.Line,
Character: property.End.Character,
},
},
},
},
},
},
}, nil
}

View File

@ -0,0 +1,50 @@
package handlers
import (
"config-lsp/handlers/wireguard"
wgcommands "config-lsp/handlers/wireguard/commands"
protocol "github.com/tliron/glsp/protocol_3_16"
)
type CodeActionGeneratePresharedKeyArgs struct {
URI protocol.DocumentUri
Line uint32
}
func CodeActionGeneratePresharedKeyArgsFromArguments(arguments map[string]any) CodeActionGeneratePresharedKeyArgs {
return CodeActionGeneratePresharedKeyArgs{
URI: arguments["URI"].(protocol.DocumentUri),
Line: uint32(arguments["Line"].(float64)),
}
}
func (args CodeActionGeneratePresharedKeyArgs) RunCommand(d *wireguard.WGDocument) (*protocol.ApplyWorkspaceEditParams, error) {
presharedKey, err := wgcommands.CreatePresharedKey()
if err != nil {
return &protocol.ApplyWorkspaceEditParams{}, err
}
section := d.Config.FindSectionByLine(args.Line)
property := d.Config.FindPropertyByLine(args.Line)
if section == nil || property == nil {
return nil, nil
}
label := "Generate Preshared Key"
return &protocol.ApplyWorkspaceEditParams{
Label: &label,
Edit: protocol.WorkspaceEdit{
Changes: map[protocol.DocumentUri][]protocol.TextEdit{
args.URI: {
{
NewText: presharedKey,
Range: property.Value.ToLSPRange(),
},
},
},
},
}, nil
}

Some files were not shown because too many files have changed in this diff.