Mirror of https://github.com/Myzel394/config-lsp.git, synced 2025-06-18 23:15:26 +02:00

Compare commits

28 commits
Commits in this comparison (SHA1 only; author and date columns were empty in the capture):
66c93938fc, e187040dd0, ac97ec77ef, a2decaeff3, d5ec3694db, 5c6ca95912, 6fe41b5040, 0dda74c8cb, d10655996f, c578b4b448, 3c9ee9da53, 429c2cd4be, 9a1686a7d8, fa45351ec5, b520ada4ed, 9b306f339e, e1140ae757, 9f22689cac, e4d7521a4c, c5fefad56d, ef625f9bf6, db4e1bae4c, 5de2711b03, 5e535741d2, ce7264aded, e69edeaece, 15ce5958da, 3857bd5694
.github/workflows/pr-tests.yaml (vendored): 9 changed lines
@@ -5,9 +5,6 @@ on: [pull_request]
 jobs:
   build:
     runs-on: ubuntu-latest
-    strategy:
-      matrix:
-        go-version: [ '1.22.x' ]

     steps:
       - uses: actions/checkout@v4
@@ -16,6 +13,12 @@ jobs:
         with:
           github_access_token: ${{ secrets.GITHUB_TOKEN }}

+      - name: Check if project can be linted
+        run: nix develop --command bash -c "just lint" && git diff --exit-code
+
+      - name: Check if antlr parsers are up to date
+        run: nix develop --command bash -c "just update-antlr-parsers" && git diff --exit-code
+
       - name: Check Nix flake
         run: nix flake check

flake.lock (generated): 12 changed lines
@@ -26,11 +26,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1741396135,
-      "narHash": "sha256-wqmdLr7h4Bk8gyKutgaApJKOM8JVvywI5P48NuqJ9Jg=",
+      "lastModified": 1742209644,
+      "narHash": "sha256-jMy1XqXqD0/tJprEbUmKilTkvbDY/C0ZGSsJJH4TNCE=",
       "owner": "tweag",
       "repo": "gomod2nix",
-      "rev": "0983848bf2a7ccbfe24d874065adb8fd0f23729b",
+      "rev": "8f3534eb8f6c5c3fce799376dc3b91bae6b11884",
       "type": "github"
     },
     "original": {
@@ -41,11 +41,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1741513245,
-        "narHash": "sha256-7rTAMNTY1xoBwz0h7ZMtEcd8LELk9R5TzBPoHuhNSCk=",
+        "lastModified": 1742669843,
+        "narHash": "sha256-G5n+FOXLXcRx+3hCJ6Rt6ZQyF1zqQ0DL0sWAMn2Nk0w=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "e3e32b642a31e6714ec1b712de8c91a3352ce7e1",
+        "rev": "1e5b653dff12029333a6546c11e108ede13052eb",
         "type": "github"
       },
       "original": {
flake.nix: 48 changed lines
@@ -3,12 +3,12 @@

   inputs = {
     nixpkgs.url = "github:nixos/nixpkgs?ref=nixos-unstable";
+    utils.url = "github:numtide/flake-utils";
     gomod2nix = {
       url = "github:tweag/gomod2nix";
       inputs.nixpkgs.follows = "nixpkgs";
       inputs.utils.follows = "utils";
     };
-    utils.url = "github:numtide/flake-utils";
   };

   outputs = { self, nixpkgs, utils, gomod2nix }:
@@ -23,26 +23,27 @@
       "aarch64-windows"
     ] (system:
       let
-        version = "0.2.0"; # CI:CD-VERSION
+        version = "0.2.2"; # CI:CD-VERSION
         pkgs = import nixpkgs {
           inherit system;
           overlays = [
             (final: prev: {
-              go = prev.go_1_22;
-              buildGoModule = prev.buildGo122Module;
+              go = prev.go_1_24;
+              buildGoModule = prev.buildGo124Module;
             })
             gomod2nix.overlays.default
           ];
         };
         inputs = [
-          pkgs.go_1_22
+          pkgs.go_1_24
         ];
         serverUncompressed = pkgs.buildGoModule {
           nativeBuildInputs = inputs;
           pname = "github.com/Myzel394/config-lsp";
           version = version;
           src = ./server;
-          vendorHash = "sha256-ttr45N8i86mSJX9Scy/Cf+YlxU5wAKMVb0YhKg28JKM=";
+          vendorHash = "sha256-0/oMmrdQGnx7opL4SYaYU2FdroKkF60FtRTvZ1dYr/Y";
+          proxyVendor = true;
           ldflags = [ "-s" "-w" ];
           checkPhase = ''
             go test -v $(pwd)/...
@@ -68,6 +69,7 @@
       in {
         packages = {
           default = server;
+          "server-uncompressed" = serverUncompressed;
           "vs-code-extension-bare" = let
             name = "config-lsp";
             node-modules = pkgs.mkYarnPackage {
@@ -131,21 +133,27 @@
             };
           in node-modules;
         };
-        devShells.default = pkgs.mkShell {
-          buildInputs = inputs ++ (with pkgs; [
-            mailutils
-            wireguard-tools
-            antlr
-            just
-          ]) ++ (if pkgs.stdenv.isLinux then with pkgs; [
-            postfix
-          ] else []);
-        };
+        devShells.default = let
+          ourGopls = pkgs.gopls;
+        in
+          pkgs.mkShell {
+            buildInputs = inputs ++ (with pkgs; [
+              mailutils
+              wireguard-tools
+              antlr
+              just
+              ourGopls
+            ]) ++ (if pkgs.stdenv.isLinux then with pkgs; [
+              postfix
+            ] else []);
+          };

         devShells."vs-code-extension" = pkgs.mkShell {
-          buildInputs = [
-            pkgs.nodejs
-            pkgs.vsce
-            pkgs.yarn2nix
+          buildInputs = with pkgs; [
+            nodejs
+            vsce
+            yarn2nix
           ];
         };
       }
justfile: 20 changed lines
@@ -23,6 +23,26 @@ show-nvim-logs:
 test:
     nix develop --command bash -c 'go test ./... -count=1'

+[working-directory: "./server"]
+update-antlr-parsers:
+    # aliases
+    cd handlers/aliases && antlr4 -Dlanguage=Go -o ast/parser Aliases.g4
+
+    # fstab
+    cd handlers/fstab && antlr4 -Dlanguage=Go -o ast/parser Fstab.g4
+
+    # sshd_config
+    cd handlers/sshd_config && antlr4 -Dlanguage=Go -o ast/parser Config.g4
+    cd handlers/sshd_config/match-parser && antlr4 -Dlanguage=Go -o parser Match.g4
+
+    # ssh_config
+    cd handlers/ssh_config && antlr4 -Dlanguage=Go -o ast/parser Config.g4
+    cd handlers/ssh_config/match-parser && antlr4 -Dlanguage=Go -o parser Match.g4
+
+    # hosts
+    cd handlers/hosts && antlr4 -Dlanguage=Go -o ast/parser Hosts.g4
+
+
 # Ready for a PR? Run this recipe before opening the PR!
 ready:
     just lint
server/common-documentation/mnt-apfs.go (new file): 75 lines
@@ -0,0 +1,75 @@
package commondocumentation

import (
	docvalues "config-lsp/doc-values"
)

var APFSDocumentationAssignable = map[docvalues.EnumString]docvalues.DeprecatedValue{
	docvalues.CreateEnumStringWithDoc(
		"user",
		"Set the owner of the files in the file system to user. The default owner is the owner of the directory on which the file system is being mounted. The user may be a user-name, or a numeric value.",
	): docvalues.UIDValue{},
	docvalues.CreateEnumStringWithDoc(
		"group",
		"Set the group of the files in the file system to group. The default group is the group of the directory on which the file system is being mounted. The group may be a group-name, or a numeric value.",
	): docvalues.GIDValue{},
	docvalues.CreateEnumStringWithDoc(
		"snapshot",
		"The name of the snapshot to mount. In this usage pathname is the mounted root directory of the base volume containing the snapshot.",
	): docvalues.StringValue{},
}

var APFSDocumentationEnums = []docvalues.EnumString{
	docvalues.CreateEnumStringWithDoc(
		"async",
		"All I/O to the file system should be done asynchronously. This can be somewhat dangerous with respect to losing data when faced with system crashes and power outages. This is also the default. It can be avoided with the noasync option.",
	),
	docvalues.CreateEnumStringWithDoc(
		"noauto",
		"This filesystem should be skipped when mount is run with the -a flag.",
	),
	docvalues.CreateEnumStringWithDoc(
		"nodev",
		"Do not interpret character or block special devices on the file system. This option is useful for a server that has file systems containing special devices for architectures other than its own.",
	),
	docvalues.CreateEnumStringWithDoc(
		"noexec",
		"Do not allow execution of any binaries on the mounted file system. This option is useful for a server that has file systems containing binaries for architectures other than its own.",
	),
	docvalues.CreateEnumStringWithDoc(
		"noowners",
		"Ignore the ownership field for the entire volume. This causes all objects to appear as owned by user ID 99 and group ID 99. User ID 99 is interpreted as the current effective user ID, while group ID 99 is used directly and translates to ``unknown''.",
	),
	docvalues.CreateEnumStringWithDoc(
		"nosuid",
		"Do not allow set-user-identifier or set-group-identifier bits to take effect.",
	),
	docvalues.CreateEnumStringWithDoc(
		"rdonly",
		"The same as -r; mount the file system read-only (even the super-user may not write it).",
	),
	docvalues.CreateEnumStringWithDoc(
		"update",
		"The same as -u; indicate that the status of an already mounted file system should be changed.",
	),
	docvalues.CreateEnumStringWithDoc(
		"union",
		"Causes the namespace to appear as the union of directories of the mounted filesystem with corresponding directories in the underlying filesystem. Lookups will be done in the mounted filesystem first. If those operations fail due to a non-existent file the underlying directory is then accessed.",
	),
	docvalues.CreateEnumStringWithDoc(
		"noatime",
		"Do not update the file access time when reading from a file. This option is useful on file systems where there are large numbers of files and performance is more critical than updating the file access time (which is rarely ever important).",
	),
	docvalues.CreateEnumStringWithDoc(
		"strictatime",
		"Always update the file access time when reading from a file. Without this option the filesystem may default to a less strict update mode, where some access time updates are skipped for performance reasons. This option could be ignored if it is not supported by the filesystem.",
	),
	docvalues.CreateEnumStringWithDoc(
		"nobrowse",
		"This option indicates that the mount point should not be visible via the GUI (i.e., appear on the Desktop as a separate volume).",
	),
	docvalues.CreateEnumStringWithDoc(
		"nofollow",
		"This option indicates that in the course of the mount system call, the kernel should not follow any symlinks that may be present in the provided mount-on directory. This is the same as the -k option.",
	),
}
server/common-documentation/mnt-bcachefs.go (new file): 195 lines
@@ -0,0 +1,195 @@
package commondocumentation

import docvalues "config-lsp/doc-values"

var checksumType = docvalues.EnumValue{
	EnforceValues: true,
	Values: []docvalues.EnumString{
		docvalues.CreateEnumString("none"),
		docvalues.CreateEnumString("crc32c"),
		docvalues.CreateEnumString("crc64"),
	},
}

var compressionType = docvalues.EnumValue{
	EnforceValues: true,
	Values: []docvalues.EnumString{
		docvalues.CreateEnumStringWithDoc("none", "(default)"),
		docvalues.CreateEnumString("lz4"),
		docvalues.CreateEnumString("gzip"),
		docvalues.CreateEnumString("zstd"),
	},
}

// No idea if those enums are correct,
// the documentation does not provide any information
var booleanEnumValue = docvalues.EnumValue{
	EnforceValues: true,
	Values: []docvalues.EnumString{
		docvalues.CreateEnumString("yes"),
		docvalues.CreateEnumString("no"),
	},
}

var BcacheFSDocumentationAssignable = map[docvalues.EnumString]docvalues.DeprecatedValue{
	docvalues.CreateEnumStringWithDoc(
		"errors",
		"Action to take on filesystem error. The errors option is used for inconsistencies that indicate some sort of a bug",
	): docvalues.EnumValue{
		EnforceValues: true,
		Values: []docvalues.EnumString{
			docvalues.CreateEnumStringWithDoc("continue", "Log the error but continue normal operation"),
			docvalues.CreateEnumStringWithDoc("ro", "Emergency read only, immediately halting any changes to the filesystem on disk"),
			docvalues.CreateEnumStringWithDoc("panic", "Immediately halt the entire machine, printing a backtrace on the system console"),
		},
	},
	docvalues.CreateEnumStringWithDoc(
		"metadata_replicas",
		"Number of replicas for metadata (journal and btree)",
	): docvalues.PositiveNumberValue(),
	docvalues.CreateEnumStringWithDoc(
		"data_replicas",
		"Number of replicas for user data",
	): docvalues.PositiveNumberValue(),
	docvalues.CreateEnumStringWithDoc(
		"metadata_checksum",
		"Checksum type for metadata writes",
	): checksumType,
	docvalues.CreateEnumStringWithDoc(
		"data_checksum",
		"Checksum type for data writes",
	): checksumType,
	docvalues.CreateEnumStringWithDoc(
		"compression",
		"Compression type",
	): compressionType,
	docvalues.CreateEnumStringWithDoc(
		"background_compression",
		"Background compression type",
	): compressionType,
	docvalues.CreateEnumStringWithDoc(
		"str_hash",
		"Hash function for string hash tables (directories and xattrs)",
	): docvalues.EnumValue{
		EnforceValues: true,
		Values: []docvalues.EnumString{
			docvalues.CreateEnumString("crc32c"),
			docvalues.CreateEnumString("crc64"),
			docvalues.CreateEnumString("siphash"),
		},
	},
	docvalues.CreateEnumStringWithDoc(
		"metadata_target",
		"Preferred target for metadata writes",
	): docvalues.StringValue{},
	docvalues.CreateEnumStringWithDoc(
		"foreground_target",
		"Preferred target for foreground writes",
	): docvalues.StringValue{},
	docvalues.CreateEnumStringWithDoc(
		"background_target",
		"Target for data to be moved to in the background",
	): docvalues.StringValue{},
	docvalues.CreateEnumStringWithDoc(
		"promote_target",
		"Target for data to be copied to on read",
	): docvalues.StringValue{},
	docvalues.CreateEnumStringWithDoc(
		"erasure_code",
		"Enable erasure coding",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"inodes_32bit",
		"Restrict new inode numbers to 32 bits",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"shard_inode_numbers",
		"Use CPU id for high bits of new inode numbers.",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"wide_macs",
		"Store full 128 bit cryptographic MACs (default 80)",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"inline_data",
		"Enable inline data extents (default on)",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"journal_flush_delay",
		"Delay in milliseconds before automatic journal commit (default 1000)",
	): docvalues.PositiveNumberValue(),
	docvalues.CreateEnumStringWithDoc(
		"journal_flush_disabled",
		"Disables journal flush on sync/fsync. `journal_flush_delay` remains in effect, thus with the default setting not more than 1 second of work will be lost",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"journal_reclaim",
		"Reclaim journal space after a certain amount of time",
	): docvalues.PositiveNumberValue(),
	docvalues.CreateEnumStringWithDoc(
		"journal_reclaim_delay",
		"Delay in milliseconds before automatic journal reclaim",
	): docvalues.PositiveNumberValue(),
	docvalues.CreateEnumStringWithDoc(
		"acl",
		"Enable POSIX ACLs",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"usrquota",
		"Enable user quotas",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"grpquota",
		"Enable group quotas",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"prjquota",
		"Enable project quotas",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"degraded",
		"Allow mounting with data degraded",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"very_degraded",
		"Allow mounting with data missing",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"verbose",
		"Extra debugging info during mount/recovery",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"fsck",
		"Run fsck during mount",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"fix_errors",
		"Fix errors without asking during fsck",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"ratelimit_errors",
		"Ratelimit error messages during fsck",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"read_only",
		"Mount in read only mode",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"nochanges",
		"Issue no writes, even for journal replay",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"norecovery",
		"Don’t replay the journal (not recommended)",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"noexcl",
		"Don’t open devices in exclusive mode",
	): booleanEnumValue,
	docvalues.CreateEnumStringWithDoc(
		"version_upgrade",
		"Upgrade on disk format to latest version",
	): booleanEnumValue,
}

var BcacheFSDocumentationEnums = []docvalues.EnumString{}
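Both new documentation files above follow the same shape: a map from documented option names to value types for key=value mount options, plus a plain list for flag options. The sketch below illustrates that pattern with a made-up filesystem; "examplefs" and both variable names are hypothetical and only the docvalues constructors visible in this diff are assumed.

package commondocumentation

import docvalues "config-lsp/doc-values"

// Hypothetical example following the mnt-apfs.go / mnt-bcachefs.go pattern:
// key=value options go into the *Assignable map, plain flags into the *Enums slice.
var ExampleFSDocumentationAssignable = map[docvalues.EnumString]docvalues.DeprecatedValue{
	docvalues.CreateEnumStringWithDoc(
		"uid",
		"Set the owner of all files on the mounted file system.",
	): docvalues.UIDValue{},
}

var ExampleFSDocumentationEnums = []docvalues.EnumString{
	docvalues.CreateEnumStringWithDoc(
		"ro",
		"Mount the file system read-only.",
	),
}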
server/common/levenshtein.go (new file): 43 lines
@@ -0,0 +1,43 @@
package common

import (
	"github.com/hbollon/go-edlib"
)

// Find items that are similar to the given input.
// This is used to find typos & suggest the correct item.
// Once an item is found that has a Damerau-Levenshtein distance of 1, it is immediately returned.
// If not, then the next 2 items of similarity 2, or 3 items of similarity 3 are returned.
// If no items with similarity <= 3 are found, then an empty slice is returned.
func FindSimilarItems[T ~string](
	input T,
	items []T,
) []T {
	itemsPerSimilarity := map[uint8][]T{
		2: make([]T, 0, 2),
		3: make([]T, 0, 3),
	}

	for _, item := range items {
		similarity := edlib.DamerauLevenshteinDistance(string(item), string(input))

		switch similarity {
		case 1:
			return []T{item}
		case 2:
			itemsPerSimilarity[2] = append(itemsPerSimilarity[2], item)

			if len(itemsPerSimilarity[2]) >= 2 {
				return itemsPerSimilarity[2]
			}
		case 3:
			itemsPerSimilarity[3] = append(itemsPerSimilarity[3], item)

			if len(itemsPerSimilarity[3]) >= 3 {
				return itemsPerSimilarity[3]
			}
		}
	}

	return append(itemsPerSimilarity[2], itemsPerSimilarity[3]...)
}
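A minimal usage sketch for the new helper; the candidate option names below are made up for illustration and are not taken from the repository.

package common

import "fmt"

// Demonstrates suggesting a correction for a misspelled option name.
func ExampleFindSimilarItems() {
	options := []string{"PermitRootLogin", "PasswordAuthentication", "Port"}

	// "PermitRootLogn" has a Damerau-Levenshtein distance of 1 to
	// "PermitRootLogin", so that single match is returned immediately.
	suggestions := FindSimilarItems("PermitRootLogn", options)

	fmt.Println(suggestions) // [PermitRootLogin]
}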
@@ -177,7 +177,7 @@ func (v KeyEnumAssignmentValue) DeprecatedFetchCompletions(line string, cursor u
 	)

 	if found {
-		relativePosition := max(1, foundPosition) - 1
+		relativePosition := min(foundPosition, len(line)-1)
 		selectedKey := line[:uint32(relativePosition)]
 		line = line[uint32(relativePosition+len(v.Separator)):]
 		cursor -= uint32(relativePosition)
@@ -2,31 +2,21 @@ package docvalues

 import (
 	"config-lsp/utils"
-	protocol "github.com/tliron/glsp/protocol_3_16"
+	"errors"
 	"strings"
+
+	protocol "github.com/tliron/glsp/protocol_3_16"
 )

-type PathDoesNotExistError struct{}
-
-func (e PathDoesNotExistError) Error() string {
-	return "This path does not exist"
-}
-
-type PathInvalidError struct{}
-
-func (e PathInvalidError) Error() string {
-	return "This path is invalid"
-}
-
 type PathType uint8

 const (
-	PathTypeExistenceOptional PathType = 0
 	PathTypeFile      PathType = 1
 	PathTypeDirectory PathType = 2
 )

 type PathValue struct {
+	IsOptional   bool
 	RequiredType PathType
 }

@@ -34,51 +24,88 @@ func (v PathValue) GetTypeDescription() []string {
 	hints := make([]string, 0)

 	switch v.RequiredType {
-	case PathTypeExistenceOptional:
-		hints = append(hints, "Optional")
-		break
 	case PathTypeFile:
 		hints = append(hints, "File")
 	case PathTypeDirectory:
 		hints = append(hints, "Directory")
 	}

+	if v.IsOptional {
+		hints = append(hints, "Optional")
+	}
+
 	return []string{strings.Join(hints, ", ")}
 }

 func (v PathValue) DeprecatedCheckIsValid(value string) []*InvalidValue {
 	if !utils.DoesPathExist(value) {
-		if v.RequiredType == PathTypeExistenceOptional {
+		if v.IsOptional {
 			return nil
 		} else {
 			return []*InvalidValue{{
-				Err:   PathDoesNotExistError{},
+				Err:   errors.New("This path does not exist"),
 				Start: 0,
 				End:   uint32(len(value)),
 			}}
 		}
 	}

-	isValid := false
+	fileExpected := (v.RequiredType & PathTypeFile) == PathTypeFile
+	directoryExpected := (v.RequiredType & PathTypeDirectory) == PathTypeDirectory
+
+	isValid := true

-	if (v.RequiredType & PathTypeFile) == PathTypeFile {
+	// If file is expected
+	if fileExpected {
+		// and exists
 		isValid = isValid && utils.IsPathFile(value)
+		// file not expected
+	} else {
+		// and should not exist
+		isValid = isValid && !utils.IsPathFile(value)
 	}

-	if (v.RequiredType & PathTypeDirectory) == PathTypeDirectory {
+	// if directory
+	if directoryExpected {
+		// and exists
 		isValid = isValid && utils.IsPathDirectory(value)
+		// directory not expected
+	} else {
+		// and should not exist
+		isValid = isValid && !utils.IsPathDirectory(value)
 	}

 	if isValid {
 		return nil
 	}

+	if fileExpected && directoryExpected {
+		return []*InvalidValue{{
+			Err:   errors.New("This must be either a file or a directory"),
+			Start: 0,
+			End:   uint32(len(value)),
+		}}
+	}
+	if fileExpected {
+		return []*InvalidValue{{
+			Err:   errors.New("This must be a file"),
+			Start: 0,
+			End:   uint32(len(value)),
+		}}
+	}
+	if directoryExpected {
+		return []*InvalidValue{{
+			Err:   errors.New("This must be a directory"),
+			Start: 0,
+			End:   uint32(len(value)),
+		}}
+	}
+
 	return []*InvalidValue{{
-		Err:   PathInvalidError{},
+		Err:   errors.New("This path is invalid"),
 		Start: 0,
 		End:   uint32(len(value)),
-	},
-	}
+	}}
 }

 func (v PathValue) DeprecatedFetchCompletions(line string, cursor uint32) []protocol.CompletionItem {
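After this rework, optionality is a separate IsOptional flag and RequiredType behaves like a bitmask that the validation code checks per bit. A short sketch of how a field definition reads with the new shape; the package name, variable names, and documentation strings are hypothetical, only the docvalues types come from this diff.

package handlers

import (
	docvalues "config-lsp/doc-values"
)

// Hypothetical field: the path may be omitted (IsOptional),
// but if it exists it must be a regular file.
var exampleLogFileField = docvalues.DocumentationValue{
	Documentation: "Path to an example log file (hypothetical)",
	Value: docvalues.PathValue{
		IsOptional:   true,
		RequiredType: docvalues.PathTypeFile,
	},
}

// "Either a file or a directory" is expressed by combining the bits;
// the rewritten DeprecatedCheckIsValid handles that combination explicitly.
var examplePathOrDirValue = docvalues.PathValue{
	RequiredType: docvalues.PathTypeFile | docvalues.PathTypeDirectory,
}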
@@ -1,36 +1,32 @@
 module config-lsp

-go 1.22.5
+go 1.24

 require (
 	github.com/antlr4-go/antlr/v4 v4.13.1
 	github.com/emirpasic/gods v1.18.1
-	github.com/google/go-cmp v0.6.0
-	github.com/k0kubun/pp v3.0.1+incompatible
-	github.com/tliron/commonlog v0.2.17
+	github.com/google/go-cmp v0.7.0
+	github.com/hbollon/go-edlib v1.6.0
+	github.com/tliron/commonlog v0.2.19
 	github.com/tliron/glsp v0.2.2
-	golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56
+	golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6
 )

 require (
 	github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
 	github.com/gorilla/websocket v1.5.3 // indirect
-	github.com/hbollon/go-edlib v1.6.0 // indirect
 	github.com/iancoleman/strcase v0.3.0 // indirect
-	github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88 // indirect
 	github.com/lucasb-eyer/go-colorful v1.2.0 // indirect
-	github.com/mattn/go-colorable v0.1.13 // indirect
 	github.com/mattn/go-isatty v0.0.20 // indirect
-	github.com/mattn/go-runewidth v0.0.16 // indirect
-	github.com/muesli/termenv v0.15.2 // indirect
-	github.com/petermattis/goid v0.0.0-20240716203034-badd1c0974d6 // indirect
+	github.com/muesli/termenv v0.16.0 // indirect
+	github.com/petermattis/goid v0.0.0-20250508124226-395b08cebbdb // indirect
 	github.com/pkg/errors v0.9.1 // indirect
 	github.com/rivo/uniseg v0.4.7 // indirect
-	github.com/sasha-s/go-deadlock v0.3.1 // indirect
+	github.com/sasha-s/go-deadlock v0.3.5 // indirect
 	github.com/segmentio/ksuid v1.0.4 // indirect
-	github.com/sourcegraph/jsonrpc2 v0.2.0 // indirect
-	github.com/tliron/kutil v0.3.24 // indirect
-	golang.org/x/crypto v0.31.0 // indirect
-	golang.org/x/sys v0.28.0 // indirect
-	golang.org/x/term v0.27.0 // indirect
+	github.com/sourcegraph/jsonrpc2 v0.2.1 // indirect
+	github.com/tliron/kutil v0.3.26 // indirect
+	golang.org/x/crypto v0.38.0 // indirect
+	golang.org/x/sys v0.33.0 // indirect
+	golang.org/x/term v0.32.0 // indirect
 )
@@ -4,8 +4,8 @@ github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiE
 github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8=
 github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc=
 github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ=
-github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
-github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
+github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
+github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
 github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
 github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
 github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
@@ -13,48 +13,37 @@ github.com/hbollon/go-edlib v1.6.0 h1:ga7AwwVIvP8mHm9GsPueC0d71cfRU/52hmPJ7Tprv4
 github.com/hbollon/go-edlib v1.6.0/go.mod h1:wnt6o6EIVEzUfgbUZY7BerzQ2uvzp354qmS2xaLkrhM=
 github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=
 github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
-github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88 h1:uC1QfSlInpQF+M0ao65imhwqKnz3Q2z/d8PWZRMQvDM=
-github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k=
-github.com/k0kubun/pp v3.0.1+incompatible h1:3tqvf7QgUnZ5tXO6pNAZlrvHgl6DvifjDrd9g2S9Z40=
-github.com/k0kubun/pp v3.0.1+incompatible/go.mod h1:GWse8YhT0p8pT4ir3ZgBbfZild3tgzSScAn6HmfYukg=
 github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
 github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
-github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
-github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
-github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
 github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
 github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
-github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
-github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
-github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo=
-github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8=
-github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o=
-github.com/petermattis/goid v0.0.0-20240716203034-badd1c0974d6 h1:DUDJI8T/9NcGbbL+AWk6vIYlmQ8ZBS8LZqVre6zbkPQ=
-github.com/petermattis/goid v0.0.0-20240716203034-badd1c0974d6/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
+github.com/muesli/termenv v0.16.0 h1:S5AlUN9dENB57rsbnkPyfdGuWIlkmzJjbFf0Tf5FWUc=
+github.com/muesli/termenv v0.16.0/go.mod h1:ZRfOIKPFDYQoDFF4Olj7/QJbW60Ol/kL1pU3VfY/Cnk=
+github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
+github.com/petermattis/goid v0.0.0-20250508124226-395b08cebbdb h1:3PrKuO92dUTMrQ9dx0YNejC6U/Si6jqKmyQ9vWjwqR4=
+github.com/petermattis/goid v0.0.0-20250508124226-395b08cebbdb/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
 github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
 github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
-github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
 github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
 github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
-github.com/sasha-s/go-deadlock v0.3.1 h1:sqv7fDNShgjcaxkO0JNcOAlr8B9+cV5Ey/OB71efZx0=
-github.com/sasha-s/go-deadlock v0.3.1/go.mod h1:F73l+cr82YSh10GxyRI6qZiCgK64VaZjwesgfQ1/iLM=
+github.com/sasha-s/go-deadlock v0.3.5 h1:tNCOEEDG6tBqrNDOX35j/7hL5FcFViG6awUGROb2NsU=
+github.com/sasha-s/go-deadlock v0.3.5/go.mod h1:bugP6EGbdGYObIlx7pUZtWqlvo8k9H6vCBBsiChJQ5U=
 github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c=
 github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE=
-github.com/sourcegraph/jsonrpc2 v0.2.0 h1:KjN/dC4fP6aN9030MZCJs9WQbTOjWHhrtKVpzzSrr/U=
-github.com/sourcegraph/jsonrpc2 v0.2.0/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
+github.com/sourcegraph/jsonrpc2 v0.2.1 h1:2GtljixMQYUYCmIg7W9aF2dFmniq/mOr2T9tFRh6zSQ=
+github.com/sourcegraph/jsonrpc2 v0.2.1/go.mod h1:ZafdZgk/axhT1cvZAPOhw+95nz2I/Ra5qMlU4gTRwIo=
-github.com/tliron/commonlog v0.2.17 h1:GFVvzDZbNLkuQfT45IZeWkrR5AyqiX7Du8pWAtFuPTY=
-github.com/tliron/commonlog v0.2.17/go.mod h1:J2Hb63/mMjYmkDzd7E+VL9wCHT6NFNSzV/IOjJWMJqc=
+github.com/tliron/commonlog v0.2.19 h1:v1mOH1TyzFLqkshR03khw7ENAZPjAyZTQBQrqN+vX9c=
+github.com/tliron/commonlog v0.2.19/go.mod h1:AcdhfcUqlAWukDrzTGyaPhUgYiNdZhS4dKzD/e0tjcY=
 github.com/tliron/glsp v0.2.2 h1:IKPfwpE8Lu8yB6Dayta+IyRMAbTVunudeauEgjXBt+c=
 github.com/tliron/glsp v0.2.2/go.mod h1:GMVWDNeODxHzmDPvYbYTCs7yHVaEATfYtXiYJ9w1nBg=
-github.com/tliron/kutil v0.3.24 h1:LvaqizF4htpEef9tC0B//sqtvQzEjDu69A4a1HrY+ko=
-github.com/tliron/kutil v0.3.24/go.mod h1:2iSIhOnOe1reqczZQy6TauVHhItsq6xRLV2rVBvodpk=
+github.com/tliron/kutil v0.3.26 h1:G+dicQLvzm3zdOMrrQFLBfHJXtk57fEu2kf1IFNyJxw=
+github.com/tliron/kutil v0.3.26/go.mod h1:1/HRVAb+fnRIRnzmhu0FPP+ZJKobrpwHStDVMuaXDzY=
-golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U=
-golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
+golang.org/x/crypto v0.38.0 h1:jt+WWG8IZlBnVbomuhg2Mdq0+BBQaHbtqHEFEigjUV8=
+golang.org/x/crypto v0.38.0/go.mod h1:MvrbAqul58NNYPKnOra203SB9vpuZW0e+RRZV+Ggqjw=
-golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56 h1:2dVuKD2vS7b0QIHQbpyTISPd0LeHDbnYEryqj5Q1ug8=
-golang.org/x/exp v0.0.0-20240719175910-8a7402abbf56/go.mod h1:M4RDyNAINzryxdtnbRXRL/OHtkFuWGRjvuhBJpk2IlY=
+golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI=
+golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
-golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
-golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
+golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
-golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
-golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
+golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
+golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
@@ -1,4 +1,4 @@
-// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.

 package parser // Aliases

@@ -1,4 +1,4 @@
-// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.

 package parser

@@ -1,4 +1,4 @@
-// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.

 package parser // Aliases

@@ -1,4 +1,4 @@
-// Code generated from Aliases.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Aliases.g4 by ANTLR 4.13.2. DO NOT EDIT.

 package parser // Aliases

@@ -15,6 +15,7 @@ var UserDeclaration = "`user`"
 var PathField = docvalues.DocumentationValue{
 	Documentation: "Append messages to file, specified by its absolute pathname",
 	Value: docvalues.PathValue{
+		IsOptional:   true,
 		RequiredType: docvalues.PathTypeFile,
 	},
 }
@@ -40,6 +41,7 @@ var EmailDeclaration = "`user-part@domain-part`"
 var IncludeField = docvalues.DocumentationValue{
 	Documentation: "Include any definitions in file as alias entries. The format of the file is identical to this one.",
 	Value: docvalues.PathValue{
+		IsOptional:   false,
 		RequiredType: docvalues.PathTypeFile,
 	},
 }
@@ -141,8 +141,8 @@ func (e FstabEntry) FetchMountOptionsField(includeDefaults bool) docvalues.Depre
 		return nil
 	}

-	var enums []docvalues.EnumString
-	var assignable map[docvalues.EnumString]docvalues.DeprecatedValue
+	var enums []docvalues.EnumString = make([]docvalues.EnumString, 0)
+	var assignable map[docvalues.EnumString]docvalues.DeprecatedValue = make(map[docvalues.EnumString]docvalues.DeprecatedValue, 0)

 	if includeDefaults {
 		enums = append(option.Enums, fields.DefaultOptions...)
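A plausible motivation for initializing these with make: appending to a nil slice is legal in Go, but assigning into a nil map panics at runtime. A minimal, self-contained illustration (the keys and values are made up):

package main

import "fmt"

func main() {
	// Appending to a nil slice works; append allocates as needed.
	var enums []string
	enums = append(enums, "defaults")
	fmt.Println(enums) // [defaults]

	// A map must be created with make (or a literal) before writes.
	assignable := make(map[string]string)
	assignable["noatime"] = "Do not update access times"
	fmt.Println(assignable["noatime"])

	// var nilMap map[string]string
	// nilMap["x"] = "y" // would panic: assignment to entry in nil map
}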
@@ -5,10 +5,6 @@ null
 '#'
 null
 null
-null
-null
-null
-null

 token symbolic names:
 null
@@ -17,10 +13,6 @@ WHITESPACE
 HASH
 STRING
 QUOTED_STRING
-ADFS
-AFFS
-BTRFS
-EXFAT

 rule names:
 entry
@@ -33,4 +25,4 @@ pass


 atn:
-[4, 1, 9, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1, 0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3, 0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40, 8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0, 1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12, 0, 2, 1, 0, 4, 5, 1, 0, 4, 9, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0, 4, 57, 1, 0, 0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0, 0, 0, 12, 65, 1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16, 1, 0, 0, 0, 16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0, 18, 19, 1, 0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24, 23, 1, 0, 0, 0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0, 0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31, 3, 6, 3, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0, 32, 34, 5, 2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1, 0, 0, 0, 35, 37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 39, 1, 0, 0, 0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0, 0, 40, 42, 1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51, 50, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0, 1, 54, 1, 1, 0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7, 0, 0, 0, 58, 5, 1, 0, 0, 0, 59, 60, 7, 1, 0, 0, 60, 7, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0, 0, 65, 66, 5, 1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51]
+[4, 1, 5, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1, 0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3, 0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40, 8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0, 1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12, 0, 1, 1, 0, 4, 5, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0, 4, 57, 1, 0, 0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0, 0, 0, 12, 65, 1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16, 1, 0, 0, 0, 16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0, 18, 19, 1, 0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24, 23, 1, 0, 0, 0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0, 0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31, 3, 6, 3, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0, 32, 34, 5, 2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1, 0, 0, 0, 35, 37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 39, 1, 0, 0, 0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0, 0, 40, 42, 1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0, 43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51, 50, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0, 1, 54, 1, 1, 0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7, 0, 0, 0, 58, 5, 1, 0, 0, 0, 59, 60, 7, 0, 0, 0, 60, 7, 1, 0, 0, 0, 61, 62, 7, 0, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0, 0, 65, 66, 5, 1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42, 45, 48, 51]
@@ -3,8 +3,4 @@ WHITESPACE=2
 HASH=3
 STRING=4
 QUOTED_STRING=5
-ADFS=6
-AFFS=7
-BTRFS=8
-EXFAT=9
 '#'=3
@@ -5,10 +5,6 @@ null
 '#'
 null
 null
-null
-null
-null
-null

 token symbolic names:
 null
@@ -17,10 +13,6 @@ WHITESPACE
 HASH
 STRING
 QUOTED_STRING
-ADFS
-AFFS
-BTRFS
-EXFAT

 rule names:
 DIGITS
@@ -28,10 +20,6 @@ WHITESPACE
 HASH
 STRING
 QUOTED_STRING
-ADFS
-AFFS
-BTRFS
-EXFAT

 channel names:
 DEFAULT_TOKEN_CHANNEL
@@ -41,4 +29,4 @@ mode names:
 DEFAULT_MODE

 atn:
-[4, 0, 9, 76, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 1, 0, 4, 0, 21, 8, 0, 11, 0, 12, 0, 22, 1, 1, 4, 1, 26, 8, 1, 11, 1, 12, 1, 27, 1, 2, 1, 2, 1, 3, 4, 3, 33, 8, 3, 11, 3, 12, 3, 34, 1, 4, 1, 4, 3, 4, 39, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 44, 8, 4, 10, 4, 12, 4, 47, 9, 4, 1, 4, 3, 4, 50, 8, 4, 1, 4, 3, 4, 53, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 0, 0, 9, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 1, 0, 12, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9, 9, 32, 32, 35, 35, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 70, 70, 102, 102, 2, 0, 83, 83, 115, 115, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 69, 69, 101, 101, 2, 0, 88, 88, 120, 120, 82, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 1, 20, 1, 0, 0, 0, 3, 25, 1, 0, 0, 0, 5, 29, 1, 0, 0, 0, 7, 32, 1, 0, 0, 0, 9, 36, 1, 0, 0, 0, 11, 54, 1, 0, 0, 0, 13, 59, 1, 0, 0, 0, 15, 64, 1, 0, 0, 0, 17, 70, 1, 0, 0, 0, 19, 21, 7, 0, 0, 0, 20, 19, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 20, 1, 0, 0, 0, 22, 23, 1, 0, 0, 0, 23, 2, 1, 0, 0, 0, 24, 26, 7, 1, 0, 0, 25, 24, 1, 0, 0, 0, 26, 27, 1, 0, 0, 0, 27, 25, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 4, 1, 0, 0, 0, 29, 30, 5, 35, 0, 0, 30, 6, 1, 0, 0, 0, 31, 33, 8, 2, 0, 0, 32, 31, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 32, 1, 0, 0, 0, 34, 35, 1, 0, 0, 0, 35, 8, 1, 0, 0, 0, 36, 38, 5, 34, 0, 0, 37, 39, 3, 3, 1, 0, 38, 37, 1, 0, 0, 0, 38, 39, 1, 0, 0, 0, 39, 45, 1, 0, 0, 0, 40, 41, 3, 7, 3, 0, 41, 42, 3, 3, 1, 0, 42, 44, 1, 0, 0, 0, 43, 40, 1, 0, 0, 0, 44, 47, 1, 0, 0, 0, 45, 43, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45, 1, 0, 0, 0, 48, 50, 3, 7, 3, 0, 49, 48, 1, 0, 0, 0, 49, 50, 1, 0, 0, 0, 50, 52, 1, 0, 0, 0, 51, 53, 5, 34, 0, 0, 52, 51, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 10, 1, 0, 0, 0, 54, 55, 7, 3, 0, 0, 55, 56, 7, 4, 0, 0, 56, 57, 7, 5, 0, 0, 57, 58, 7, 6, 0, 0, 58, 12, 1, 0, 0, 0, 59, 60, 7, 3, 0, 0, 60, 61, 7, 5, 0, 0, 61, 62, 7, 5, 0, 0, 62, 63, 7, 6, 0, 0, 63, 14, 1, 0, 0, 0, 64, 65, 7, 7, 0, 0, 65, 66, 7, 8, 0, 0, 66, 67, 7, 9, 0, 0, 67, 68, 7, 5, 0, 0, 68, 69, 7, 6, 0, 0, 69, 16, 1, 0, 0, 0, 70, 71, 7, 10, 0, 0, 71, 72, 7, 11, 0, 0, 72, 73, 7, 5, 0, 0, 73, 74, 7, 3, 0, 0, 74, 75, 7, 8, 0, 0, 75, 18, 1, 0, 0, 0, 8, 0, 22, 27, 34, 38, 45, 49, 52, 0]
+[4, 0, 5, 46, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 1, 0, 4, 0, 13, 8, 0, 11, 0, 12, 0, 14, 1, 1, 4, 1, 18, 8, 1, 11, 1, 12, 1, 19, 1, 2, 1, 2, 1, 3, 4, 3, 25, 8, 3, 11, 3, 12, 3, 26, 1, 4, 1, 4, 3, 4, 31, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 36, 8, 4, 10, 4, 12, 4, 39, 9, 4, 1, 4, 3, 4, 42, 8, 4, 1, 4, 3, 4, 45, 8, 4, 0, 0, 5, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 1, 0, 3, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9, 9, 32, 32, 35, 35, 52, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 1, 12, 1, 0, 0, 0, 3, 17, 1, 0, 0, 0, 5, 21, 1, 0, 0, 0, 7, 24, 1, 0, 0, 0, 9, 28, 1, 0, 0, 0, 11, 13, 7, 0, 0, 0, 12, 11, 1, 0, 0, 0, 13, 14, 1, 0, 0, 0, 14, 12, 1, 0, 0, 0, 14, 15, 1, 0, 0, 0, 15, 2, 1, 0, 0, 0, 16, 18, 7, 1, 0, 0, 17, 16, 1, 0, 0, 0, 18, 19, 1, 0, 0, 0, 19, 17, 1, 0, 0, 0, 19, 20, 1, 0, 0, 0, 20, 4, 1, 0, 0, 0, 21, 22, 5, 35, 0, 0, 22, 6, 1, 0, 0, 0, 23, 25, 8, 2, 0, 0, 24, 23, 1, 0, 0, 0, 25, 26, 1, 0, 0, 0, 26, 24, 1, 0, 0, 0, 26, 27, 1, 0, 0, 0, 27, 8, 1, 0, 0, 0, 28, 30, 5, 34, 0, 0, 29, 31, 3, 3, 1, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 37, 1, 0, 0, 0, 32, 33, 3, 7, 3, 0, 33, 34, 3, 3, 1, 0, 34, 36, 1, 0, 0, 0, 35, 32, 1, 0, 0, 0, 36, 39, 1, 0, 0, 0, 37, 35, 1, 0, 0, 0, 37, 38, 1, 0, 0, 0, 38, 41, 1, 0, 0, 0, 39, 37, 1, 0, 0, 0, 40, 42, 3, 7, 3, 0, 41, 40, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 44, 1, 0, 0, 0, 43, 45, 5, 34, 0, 0, 44, 43, 1, 0, 0, 0, 44, 45, 1, 0, 0, 0, 45, 10, 1, 0, 0, 0, 8, 0, 14, 19, 26, 30, 37, 41, 44, 0]
@@ -3,8 +3,4 @@ WHITESPACE=2
 HASH=3
 STRING=4
 QUOTED_STRING=5
-ADFS=6
-AFFS=7
-BTRFS=8
-EXFAT=9
 '#'=3
@@ -1,4 +1,4 @@
-// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.

 package parser // Fstab

@@ -1,4 +1,4 @@
-// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.

 package parser

|
|||||||
"", "", "", "'#'",
|
"", "", "", "'#'",
|
||||||
}
|
}
|
||||||
staticData.SymbolicNames = []string{
|
staticData.SymbolicNames = []string{
|
||||||
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING", "ADFS",
|
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING",
|
||||||
"AFFS", "BTRFS", "EXFAT",
|
|
||||||
}
|
}
|
||||||
staticData.RuleNames = []string{
|
staticData.RuleNames = []string{
|
||||||
"DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING", "ADFS", "AFFS",
|
"DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING",
|
||||||
"BTRFS", "EXFAT",
|
|
||||||
}
|
}
|
||||||
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||||
staticData.serializedATN = []int32{
|
staticData.serializedATN = []int32{
|
||||||
4, 0, 9, 76, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
4, 0, 5, 46, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
|
||||||
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 1, 0, 4, 0, 21,
|
4, 7, 4, 1, 0, 4, 0, 13, 8, 0, 11, 0, 12, 0, 14, 1, 1, 4, 1, 18, 8, 1,
|
||||||
8, 0, 11, 0, 12, 0, 22, 1, 1, 4, 1, 26, 8, 1, 11, 1, 12, 1, 27, 1, 2, 1,
|
11, 1, 12, 1, 19, 1, 2, 1, 2, 1, 3, 4, 3, 25, 8, 3, 11, 3, 12, 3, 26, 1,
|
||||||
2, 1, 3, 4, 3, 33, 8, 3, 11, 3, 12, 3, 34, 1, 4, 1, 4, 3, 4, 39, 8, 4,
|
4, 1, 4, 3, 4, 31, 8, 4, 1, 4, 1, 4, 1, 4, 5, 4, 36, 8, 4, 10, 4, 12, 4,
|
||||||
1, 4, 1, 4, 1, 4, 5, 4, 44, 8, 4, 10, 4, 12, 4, 47, 9, 4, 1, 4, 3, 4, 50,
|
39, 9, 4, 1, 4, 3, 4, 42, 8, 4, 1, 4, 3, 4, 45, 8, 4, 0, 0, 5, 1, 1, 3,
|
||||||
8, 4, 1, 4, 3, 4, 53, 8, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1,
|
2, 5, 3, 7, 4, 9, 5, 1, 0, 3, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9,
|
||||||
6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1,
|
9, 32, 32, 35, 35, 52, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0,
|
||||||
8, 1, 8, 1, 8, 0, 0, 9, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15,
|
0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 1, 12, 1, 0, 0, 0, 3, 17, 1,
|
||||||
8, 17, 9, 1, 0, 12, 1, 0, 48, 57, 2, 0, 9, 9, 32, 32, 3, 0, 9, 9, 32, 32,
|
0, 0, 0, 5, 21, 1, 0, 0, 0, 7, 24, 1, 0, 0, 0, 9, 28, 1, 0, 0, 0, 11, 13,
|
||||||
35, 35, 2, 0, 65, 65, 97, 97, 2, 0, 68, 68, 100, 100, 2, 0, 70, 70, 102,
|
7, 0, 0, 0, 12, 11, 1, 0, 0, 0, 13, 14, 1, 0, 0, 0, 14, 12, 1, 0, 0, 0,
|
||||||
102, 2, 0, 83, 83, 115, 115, 2, 0, 66, 66, 98, 98, 2, 0, 84, 84, 116, 116,
|
14, 15, 1, 0, 0, 0, 15, 2, 1, 0, 0, 0, 16, 18, 7, 1, 0, 0, 17, 16, 1, 0,
|
||||||
2, 0, 82, 82, 114, 114, 2, 0, 69, 69, 101, 101, 2, 0, 88, 88, 120, 120,
|
0, 0, 18, 19, 1, 0, 0, 0, 19, 17, 1, 0, 0, 0, 19, 20, 1, 0, 0, 0, 20, 4,
|
||||||
82, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0,
|
1, 0, 0, 0, 21, 22, 5, 35, 0, 0, 22, 6, 1, 0, 0, 0, 23, 25, 8, 2, 0, 0,
|
||||||
0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0,
|
24, 23, 1, 0, 0, 0, 25, 26, 1, 0, 0, 0, 26, 24, 1, 0, 0, 0, 26, 27, 1,
|
||||||
0, 0, 0, 17, 1, 0, 0, 0, 1, 20, 1, 0, 0, 0, 3, 25, 1, 0, 0, 0, 5, 29, 1,
|
0, 0, 0, 27, 8, 1, 0, 0, 0, 28, 30, 5, 34, 0, 0, 29, 31, 3, 3, 1, 0, 30,
|
||||||
0, 0, 0, 7, 32, 1, 0, 0, 0, 9, 36, 1, 0, 0, 0, 11, 54, 1, 0, 0, 0, 13,
|
29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 37, 1, 0, 0, 0, 32, 33, 3, 7, 3,
|
||||||
59, 1, 0, 0, 0, 15, 64, 1, 0, 0, 0, 17, 70, 1, 0, 0, 0, 19, 21, 7, 0, 0,
|
0, 33, 34, 3, 3, 1, 0, 34, 36, 1, 0, 0, 0, 35, 32, 1, 0, 0, 0, 36, 39,
|
||||||
0, 20, 19, 1, 0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 20, 1, 0, 0, 0, 22, 23,
|
1, 0, 0, 0, 37, 35, 1, 0, 0, 0, 37, 38, 1, 0, 0, 0, 38, 41, 1, 0, 0, 0,
|
||||||
1, 0, 0, 0, 23, 2, 1, 0, 0, 0, 24, 26, 7, 1, 0, 0, 25, 24, 1, 0, 0, 0,
|
39, 37, 1, 0, 0, 0, 40, 42, 3, 7, 3, 0, 41, 40, 1, 0, 0, 0, 41, 42, 1,
|
||||||
26, 27, 1, 0, 0, 0, 27, 25, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 4, 1, 0,
|
0, 0, 0, 42, 44, 1, 0, 0, 0, 43, 45, 5, 34, 0, 0, 44, 43, 1, 0, 0, 0, 44,
|
||||||
0, 0, 29, 30, 5, 35, 0, 0, 30, 6, 1, 0, 0, 0, 31, 33, 8, 2, 0, 0, 32, 31,
|
45, 1, 0, 0, 0, 45, 10, 1, 0, 0, 0, 8, 0, 14, 19, 26, 30, 37, 41, 44, 0,
|
||||||
1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 32, 1, 0, 0, 0, 34, 35, 1, 0, 0, 0,
|
|
||||||
35, 8, 1, 0, 0, 0, 36, 38, 5, 34, 0, 0, 37, 39, 3, 3, 1, 0, 38, 37, 1,
|
|
||||||
0, 0, 0, 38, 39, 1, 0, 0, 0, 39, 45, 1, 0, 0, 0, 40, 41, 3, 7, 3, 0, 41,
|
|
||||||
42, 3, 3, 1, 0, 42, 44, 1, 0, 0, 0, 43, 40, 1, 0, 0, 0, 44, 47, 1, 0, 0,
|
|
||||||
0, 45, 43, 1, 0, 0, 0, 45, 46, 1, 0, 0, 0, 46, 49, 1, 0, 0, 0, 47, 45,
|
|
||||||
1, 0, 0, 0, 48, 50, 3, 7, 3, 0, 49, 48, 1, 0, 0, 0, 49, 50, 1, 0, 0, 0,
|
|
||||||
50, 52, 1, 0, 0, 0, 51, 53, 5, 34, 0, 0, 52, 51, 1, 0, 0, 0, 52, 53, 1,
|
|
||||||
0, 0, 0, 53, 10, 1, 0, 0, 0, 54, 55, 7, 3, 0, 0, 55, 56, 7, 4, 0, 0, 56,
|
|
||||||
57, 7, 5, 0, 0, 57, 58, 7, 6, 0, 0, 58, 12, 1, 0, 0, 0, 59, 60, 7, 3, 0,
|
|
||||||
0, 60, 61, 7, 5, 0, 0, 61, 62, 7, 5, 0, 0, 62, 63, 7, 6, 0, 0, 63, 14,
|
|
||||||
1, 0, 0, 0, 64, 65, 7, 7, 0, 0, 65, 66, 7, 8, 0, 0, 66, 67, 7, 9, 0, 0,
|
|
||||||
67, 68, 7, 5, 0, 0, 68, 69, 7, 6, 0, 0, 69, 16, 1, 0, 0, 0, 70, 71, 7,
|
|
||||||
10, 0, 0, 71, 72, 7, 11, 0, 0, 72, 73, 7, 5, 0, 0, 73, 74, 7, 3, 0, 0,
|
|
||||||
74, 75, 7, 8, 0, 0, 75, 18, 1, 0, 0, 0, 8, 0, 22, 27, 34, 38, 45, 49, 52,
|
|
||||||
0,
|
|
||||||
}
|
}
|
||||||
deserializer := antlr.NewATNDeserializer(nil)
|
deserializer := antlr.NewATNDeserializer(nil)
|
||||||
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
||||||
@ -136,8 +119,4 @@ const (
FstabLexerHASH = 3
FstabLexerSTRING = 4
FstabLexerQUOTED_STRING = 5
-FstabLexerADFS = 6
-FstabLexerAFFS = 7
-FstabLexerBTRFS = 8
-FstabLexerEXFAT = 9
)
|
@ -1,4 +1,4 @@
-// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Fstab

@ -1,4 +1,4 @@
-// Code generated from Fstab.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Fstab.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Fstab
|
|
||||||
@ -36,8 +36,7 @@ func fstabParserInit() {
|
|||||||
"", "", "", "'#'",
|
"", "", "", "'#'",
|
||||||
}
|
}
|
||||||
staticData.SymbolicNames = []string{
|
staticData.SymbolicNames = []string{
|
||||||
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING", "ADFS",
|
"", "DIGITS", "WHITESPACE", "HASH", "STRING", "QUOTED_STRING",
|
||||||
"AFFS", "BTRFS", "EXFAT",
|
|
||||||
}
|
}
|
||||||
staticData.RuleNames = []string{
|
staticData.RuleNames = []string{
|
||||||
"entry", "spec", "mountPoint", "fileSystem", "mountOptions", "freq",
|
"entry", "spec", "mountPoint", "fileSystem", "mountOptions", "freq",
|
||||||
@ -45,35 +44,35 @@ func fstabParserInit() {
|
|||||||
}
|
}
|
||||||
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
staticData.PredictionContextCache = antlr.NewPredictionContextCache()
|
||||||
staticData.serializedATN = []int32{
|
staticData.serializedATN = []int32{
|
||||||
4, 1, 9, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4,
|
4, 1, 5, 68, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4,
|
||||||
2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1,
|
2, 5, 7, 5, 2, 6, 7, 6, 1, 0, 3, 0, 16, 8, 0, 1, 0, 3, 0, 19, 8, 0, 1,
|
||||||
0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3,
|
0, 3, 0, 22, 8, 0, 1, 0, 3, 0, 25, 8, 0, 1, 0, 3, 0, 28, 8, 0, 1, 0, 3,
|
||||||
0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40,
|
0, 31, 8, 0, 1, 0, 3, 0, 34, 8, 0, 1, 0, 3, 0, 37, 8, 0, 1, 0, 3, 0, 40,
|
||||||
8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0,
|
8, 0, 1, 0, 3, 0, 43, 8, 0, 1, 0, 3, 0, 46, 8, 0, 1, 0, 3, 0, 49, 8, 0,
|
||||||
1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1,
|
1, 0, 3, 0, 52, 8, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1,
|
||||||
4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12,
|
4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 0, 0, 7, 0, 2, 4, 6, 8, 10, 12,
|
||||||
0, 2, 1, 0, 4, 5, 1, 0, 4, 9, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0,
|
0, 1, 1, 0, 4, 5, 73, 0, 15, 1, 0, 0, 0, 2, 55, 1, 0, 0, 0, 4, 57, 1, 0,
|
||||||
4, 57, 1, 0, 0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0,
|
0, 0, 6, 59, 1, 0, 0, 0, 8, 61, 1, 0, 0, 0, 10, 63, 1, 0, 0, 0, 12, 65,
|
||||||
0, 0, 12, 65, 1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16,
|
1, 0, 0, 0, 14, 16, 5, 2, 0, 0, 15, 14, 1, 0, 0, 0, 15, 16, 1, 0, 0, 0,
|
||||||
1, 0, 0, 0, 16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0,
|
16, 18, 1, 0, 0, 0, 17, 19, 3, 2, 1, 0, 18, 17, 1, 0, 0, 0, 18, 19, 1,
|
||||||
18, 19, 1, 0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1,
|
0, 0, 0, 19, 21, 1, 0, 0, 0, 20, 22, 5, 2, 0, 0, 21, 20, 1, 0, 0, 0, 21,
|
||||||
0, 0, 0, 21, 22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24,
|
22, 1, 0, 0, 0, 22, 24, 1, 0, 0, 0, 23, 25, 3, 4, 2, 0, 24, 23, 1, 0, 0,
|
||||||
23, 1, 0, 0, 0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0,
|
0, 24, 25, 1, 0, 0, 0, 25, 27, 1, 0, 0, 0, 26, 28, 5, 2, 0, 0, 27, 26,
|
||||||
0, 27, 26, 1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31,
|
1, 0, 0, 0, 27, 28, 1, 0, 0, 0, 28, 30, 1, 0, 0, 0, 29, 31, 3, 6, 3, 0,
|
||||||
3, 6, 3, 0, 30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0,
|
30, 29, 1, 0, 0, 0, 30, 31, 1, 0, 0, 0, 31, 33, 1, 0, 0, 0, 32, 34, 5,
|
||||||
32, 34, 5, 2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1,
|
2, 0, 0, 33, 32, 1, 0, 0, 0, 33, 34, 1, 0, 0, 0, 34, 36, 1, 0, 0, 0, 35,
|
||||||
0, 0, 0, 35, 37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37,
|
37, 3, 8, 4, 0, 36, 35, 1, 0, 0, 0, 36, 37, 1, 0, 0, 0, 37, 39, 1, 0, 0,
|
||||||
39, 1, 0, 0, 0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0,
|
0, 38, 40, 5, 2, 0, 0, 39, 38, 1, 0, 0, 0, 39, 40, 1, 0, 0, 0, 40, 42,
|
||||||
0, 40, 42, 1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43,
|
1, 0, 0, 0, 41, 43, 3, 10, 5, 0, 42, 41, 1, 0, 0, 0, 42, 43, 1, 0, 0, 0,
|
||||||
1, 0, 0, 0, 43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0,
|
43, 45, 1, 0, 0, 0, 44, 46, 5, 2, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1,
|
||||||
45, 46, 1, 0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1,
|
0, 0, 0, 46, 48, 1, 0, 0, 0, 47, 49, 3, 12, 6, 0, 48, 47, 1, 0, 0, 0, 48,
|
||||||
0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51,
|
49, 1, 0, 0, 0, 49, 51, 1, 0, 0, 0, 50, 52, 5, 2, 0, 0, 51, 50, 1, 0, 0,
|
||||||
50, 1, 0, 0, 0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0,
|
0, 51, 52, 1, 0, 0, 0, 52, 53, 1, 0, 0, 0, 53, 54, 5, 0, 0, 1, 54, 1, 1,
|
||||||
1, 54, 1, 1, 0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7,
|
0, 0, 0, 55, 56, 7, 0, 0, 0, 56, 3, 1, 0, 0, 0, 57, 58, 7, 0, 0, 0, 58,
|
||||||
0, 0, 0, 58, 5, 1, 0, 0, 0, 59, 60, 7, 1, 0, 0, 60, 7, 1, 0, 0, 0, 61,
|
5, 1, 0, 0, 0, 59, 60, 7, 0, 0, 0, 60, 7, 1, 0, 0, 0, 61, 62, 7, 0, 0,
|
||||||
62, 7, 0, 0, 0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0,
|
0, 62, 9, 1, 0, 0, 0, 63, 64, 5, 1, 0, 0, 64, 11, 1, 0, 0, 0, 65, 66, 5,
|
||||||
0, 65, 66, 5, 1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30,
|
1, 0, 0, 66, 13, 1, 0, 0, 0, 13, 15, 18, 21, 24, 27, 30, 33, 36, 39, 42,
|
||||||
33, 36, 39, 42, 45, 48, 51,
|
45, 48, 51,
|
||||||
}
|
}
|
||||||
deserializer := antlr.NewATNDeserializer(nil)
|
deserializer := antlr.NewATNDeserializer(nil)
|
||||||
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
|
||||||
@ -117,10 +116,6 @@ const (
|
|||||||
FstabParserHASH = 3
|
FstabParserHASH = 3
|
||||||
FstabParserSTRING = 4
|
FstabParserSTRING = 4
|
||||||
FstabParserQUOTED_STRING = 5
|
FstabParserQUOTED_STRING = 5
|
||||||
FstabParserADFS = 6
|
|
||||||
FstabParserAFFS = 7
|
|
||||||
FstabParserBTRFS = 8
|
|
||||||
FstabParserEXFAT = 9
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// FstabParser rules.
|
// FstabParser rules.
|
||||||
@ -754,10 +749,6 @@ type IFileSystemContext interface {
|
|||||||
GetParser() antlr.Parser
|
GetParser() antlr.Parser
|
||||||
|
|
||||||
// Getter signatures
|
// Getter signatures
|
||||||
ADFS() antlr.TerminalNode
|
|
||||||
AFFS() antlr.TerminalNode
|
|
||||||
BTRFS() antlr.TerminalNode
|
|
||||||
EXFAT() antlr.TerminalNode
|
|
||||||
STRING() antlr.TerminalNode
|
STRING() antlr.TerminalNode
|
||||||
QUOTED_STRING() antlr.TerminalNode
|
QUOTED_STRING() antlr.TerminalNode
|
||||||
|
|
||||||
@ -797,22 +788,6 @@ func NewFileSystemContext(parser antlr.Parser, parent antlr.ParserRuleContext, i
|
|||||||
|
|
||||||
func (s *FileSystemContext) GetParser() antlr.Parser { return s.parser }
|
func (s *FileSystemContext) GetParser() antlr.Parser { return s.parser }
|
||||||
|
|
||||||
func (s *FileSystemContext) ADFS() antlr.TerminalNode {
|
|
||||||
return s.GetToken(FstabParserADFS, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *FileSystemContext) AFFS() antlr.TerminalNode {
|
|
||||||
return s.GetToken(FstabParserAFFS, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *FileSystemContext) BTRFS() antlr.TerminalNode {
|
|
||||||
return s.GetToken(FstabParserBTRFS, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *FileSystemContext) EXFAT() antlr.TerminalNode {
|
|
||||||
return s.GetToken(FstabParserEXFAT, 0)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s *FileSystemContext) STRING() antlr.TerminalNode {
|
func (s *FileSystemContext) STRING() antlr.TerminalNode {
|
||||||
return s.GetToken(FstabParserSTRING, 0)
|
return s.GetToken(FstabParserSTRING, 0)
|
||||||
}
|
}
|
||||||
@ -851,7 +826,7 @@ func (p *FstabParser) FileSystem() (localctx IFileSystemContext) {
|
|||||||
p.SetState(59)
|
p.SetState(59)
|
||||||
_la = p.GetTokenStream().LA(1)
|
_la = p.GetTokenStream().LA(1)
|
||||||
|
|
||||||
if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&1008) != 0) {
|
if !(_la == FstabParserSTRING || _la == FstabParserQUOTED_STRING) {
|
||||||
p.GetErrorHandler().RecoverInline(p)
|
p.GetErrorHandler().RecoverInline(p)
|
||||||
} else {
|
} else {
|
||||||
p.GetErrorHandler().ReportMatch(p)
|
p.GetErrorHandler().ReportMatch(p)
|
||||||
|
@ -6,6 +6,31 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
func createMountOptionField(
|
||||||
|
options []docvalues.EnumString,
|
||||||
|
assignOption map[docvalues.EnumString]docvalues.DeprecatedValue,
|
||||||
|
) docvalues.DeprecatedValue {
|
||||||
|
// dynamicOptions := docvalues.MergeKeyEnumAssignmentMaps(defaultAssignOptions, assignOption)
|
||||||
|
|
||||||
|
return docvalues.ArrayValue{
|
||||||
|
Separator: ",",
|
||||||
|
DuplicatesExtractor: &MountOptionsExtractor,
|
||||||
|
SubValue: docvalues.OrValue{
|
||||||
|
Values: []docvalues.DeprecatedValue{
|
||||||
|
docvalues.KeyEnumAssignmentValue{
|
||||||
|
Values: assignOption,
|
||||||
|
ValueIsOptional: false,
|
||||||
|
Separator: "=",
|
||||||
|
},
|
||||||
|
docvalues.EnumValue{
|
||||||
|
EnforceValues: true,
|
||||||
|
Values: options,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
var MountOptionsExtractor = func(value string) string {
|
var MountOptionsExtractor = func(value string) string {
|
||||||
separatorIndex := strings.Index(value, "=")
|
separatorIndex := strings.Index(value, "=")
|
||||||
|
|
||||||
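The helper above composes the per-filesystem documentation into a single mount-options value: a comma-separated ArrayValue whose elements are either key=value assignments (KeyEnumAssignmentValue with "=" as separator) or bare flags (EnumValue). A rough sketch of the kind of options column this is meant to validate; the concrete option names are illustrative and would have to exist in options/assignOption for this exact string to pass:

	// Illustrative only: "rw" and "noatime" as bare flags, "compress=zstd" as a key=value assignment.
	const exampleMountOptions = "rw,noatime,compress=zstd"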
@ -339,31 +364,6 @@ Added in version 233.`,
|
|||||||
): docvalues.StringValue{},
|
): docvalues.StringValue{},
|
||||||
}
|
}
|
||||||
|
|
||||||
func createMountOptionField(
|
|
||||||
options []docvalues.EnumString,
|
|
||||||
assignOption map[docvalues.EnumString]docvalues.DeprecatedValue,
|
|
||||||
) docvalues.DeprecatedValue {
|
|
||||||
// dynamicOptions := docvalues.MergeKeyEnumAssignmentMaps(defaultAssignOptions, assignOption)
|
|
||||||
|
|
||||||
return docvalues.ArrayValue{
|
|
||||||
Separator: ",",
|
|
||||||
DuplicatesExtractor: &MountOptionsExtractor,
|
|
||||||
SubValue: docvalues.OrValue{
|
|
||||||
Values: []docvalues.DeprecatedValue{
|
|
||||||
docvalues.KeyEnumAssignmentValue{
|
|
||||||
Values: assignOption,
|
|
||||||
ValueIsOptional: false,
|
|
||||||
Separator: "=",
|
|
||||||
},
|
|
||||||
docvalues.EnumValue{
|
|
||||||
EnforceValues: true,
|
|
||||||
Values: options,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type optionField struct {
|
type optionField struct {
|
||||||
Assignable map[docvalues.EnumString]docvalues.DeprecatedValue
|
Assignable map[docvalues.EnumString]docvalues.DeprecatedValue
|
||||||
Enums []docvalues.EnumString
|
Enums []docvalues.EnumString
|
||||||
@ -376,6 +376,10 @@ var MountOptionsMapField = map[string]optionField{
|
|||||||
Enums: commondocumentation.AdfsDocumentationEnums,
|
Enums: commondocumentation.AdfsDocumentationEnums,
|
||||||
Assignable: commondocumentation.AdfsDocumentationAssignable,
|
Assignable: commondocumentation.AdfsDocumentationAssignable,
|
||||||
},
|
},
|
||||||
|
"apfs": {
|
||||||
|
Enums: commondocumentation.APFSDocumentationEnums,
|
||||||
|
Assignable: commondocumentation.APFSDocumentationAssignable,
|
||||||
|
},
|
||||||
"affs": {
|
"affs": {
|
||||||
Enums: commondocumentation.AffsDocumentationEnums,
|
Enums: commondocumentation.AffsDocumentationEnums,
|
||||||
Assignable: commondocumentation.AffsDocumentationAssignable,
|
Assignable: commondocumentation.AffsDocumentationAssignable,
|
||||||
@ -478,4 +482,8 @@ var MountOptionsMapField = map[string]optionField{
|
|||||||
Enums: commondocumentation.VfatDocumentationEnums,
|
Enums: commondocumentation.VfatDocumentationEnums,
|
||||||
Assignable: commondocumentation.VfatDocumentationAssignable,
|
Assignable: commondocumentation.VfatDocumentationAssignable,
|
||||||
},
|
},
|
||||||
|
"bcachefs": {
|
||||||
|
Enums: commondocumentation.BcacheFSDocumentationEnums,
|
||||||
|
Assignable: commondocumentation.BcacheFSDocumentationAssignable,
|
||||||
|
},
|
||||||
}
|
}
|
||||||
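Options for a filesystem type are still resolved through MountOptionsMapField; a minimal call-site sketch for one of the newly added types, assuming the lookup happens where the mount-options column is analyzed (the sketch itself is not part of this change):

	if field, found := MountOptionsMapField["bcachefs"]; found {
		// Build the docvalues field used to validate and complete the options column.
		optionsValue := createMountOptionField(field.Enums, field.Assignable)
		_ = optionsValue
	}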
|
@ -16,7 +16,8 @@ var LabelField = docvalues.RegexValue{
|
|||||||
var SpecField = docvalues.OrValue{
|
var SpecField = docvalues.OrValue{
|
||||||
Values: []docvalues.DeprecatedValue{
|
Values: []docvalues.DeprecatedValue{
|
||||||
docvalues.PathValue{
|
docvalues.PathValue{
|
||||||
RequiredType: docvalues.PathTypeExistenceOptional,
|
IsOptional: false,
|
||||||
|
RequiredType: docvalues.PathTypeFile,
|
||||||
},
|
},
|
||||||
docvalues.KeyEnumAssignmentValue{
|
docvalues.KeyEnumAssignmentValue{
|
||||||
Separator: "=",
|
Separator: "=",
|
||||||
|
@ -44,39 +44,29 @@ func GetCompletion(
|
|||||||
fileSystemType := entry.Fields.FilesystemType.Value.Value
|
fileSystemType := entry.Fields.FilesystemType.Value.Value
|
||||||
completions := make([]protocol.CompletionItem, 0, 50)
|
completions := make([]protocol.CompletionItem, 0, 50)
|
||||||
|
|
||||||
for _, completion := range fields.DefaultMountOptionsField.DeprecatedFetchCompletions(line, cursor) {
|
optionsValue := entry.FetchMountOptionsField(false)
|
||||||
var documentation string
|
|
||||||
|
|
||||||
switch completion.Documentation.(type) {
|
if optionsValue != nil {
|
||||||
case string:
|
for _, completion := range optionsValue.DeprecatedFetchCompletions(line, cursor) {
|
||||||
documentation = completion.Documentation.(string)
|
var documentation string
|
||||||
case *string:
|
|
||||||
documentation = *completion.Documentation.(*string)
|
|
||||||
}
|
|
||||||
|
|
||||||
completion.Documentation = protocol.MarkupContent{
|
switch completion.Documentation.(type) {
|
||||||
Kind: protocol.MarkupKindMarkdown,
|
case string:
|
||||||
Value: documentation + "\n\n" + "From: _Default Mount Options_",
|
documentation = completion.Documentation.(string)
|
||||||
|
case *string:
|
||||||
|
documentation = *completion.Documentation.(*string)
|
||||||
|
}
|
||||||
|
|
||||||
|
completion.Documentation = protocol.MarkupContent{
|
||||||
|
Kind: protocol.MarkupKindMarkdown,
|
||||||
|
Value: documentation + "\n\n" + fmt.Sprintf("From: _%s_", fileSystemType),
|
||||||
|
}
|
||||||
|
completions = append(completions, completion)
|
||||||
}
|
}
|
||||||
completions = append(completions, completion)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, completion := range entry.FetchMountOptionsField(false).DeprecatedFetchCompletions(line, cursor) {
|
// Add defaults
|
||||||
var documentation string
|
completions = append(completions, fields.DefaultMountOptionsField.DeprecatedFetchCompletions(line, cursor)...)
|
||||||
|
|
||||||
switch completion.Documentation.(type) {
|
|
||||||
case string:
|
|
||||||
documentation = completion.Documentation.(string)
|
|
||||||
case *string:
|
|
||||||
documentation = *completion.Documentation.(*string)
|
|
||||||
}
|
|
||||||
|
|
||||||
completion.Documentation = protocol.MarkupContent{
|
|
||||||
Kind: protocol.MarkupKindMarkdown,
|
|
||||||
Value: documentation + "\n\n" + fmt.Sprintf("From: _%s_", fileSystemType),
|
|
||||||
}
|
|
||||||
completions = append(completions, completion)
|
|
||||||
}
|
|
||||||
|
|
||||||
return completions, nil
|
return completions, nil
|
||||||
case ast.FstabFieldFreq:
|
case ast.FstabFieldFreq:
|
||||||
|
@ -1,4 +1,4 @@
-// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Hosts

@ -1,4 +1,4 @@
-// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser

@ -1,4 +1,4 @@
-// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Hosts

@ -1,4 +1,4 @@
-// Code generated from Hosts.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Hosts.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Hosts

@ -1,4 +1,4 @@
-// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Config

@ -1,4 +1,4 @@
-// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser

@ -1,4 +1,4 @@
-// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Config

@ -1,4 +1,4 @@
-// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Config
|
|
||||||
|
@ -139,6 +139,7 @@ rsa-sha2-512,rsa-sha2-256
|
|||||||
Arguments to CertificateFile may use the tilde syntax to refer to a user's home directory, the tokens described in the TOKENS section and environment variables as described in the ENVIRONMENT VARIABLES section.
|
Arguments to CertificateFile may use the tilde syntax to refer to a user's home directory, the tokens described in the TOKENS section and environment variables as described in the ENVIRONMENT VARIABLES section.
|
||||||
It is possible to have multiple certificate files specified in configuration files; these certificates will be tried in sequence. Multiple CertificateFile directives will add to the list of certificates used for authentication.`,
|
It is possible to have multiple certificate files specified in configuration files; these certificates will be tried in sequence. Multiple CertificateFile directives will add to the list of certificates used for authentication.`,
|
||||||
Value: docvalues.PathValue{
|
Value: docvalues.PathValue{
|
||||||
|
IsOptional: true,
|
||||||
RequiredType: docvalues.PathTypeFile,
|
RequiredType: docvalues.PathTypeFile,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -366,6 +367,7 @@ aes128-gcm@openssh.com,aes256-gcm@openssh.com
|
|||||||
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
|
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
|
||||||
RespectQuotes: true,
|
RespectQuotes: true,
|
||||||
SubValue: docvalues.PathValue{
|
SubValue: docvalues.PathValue{
|
||||||
|
IsOptional: true,
|
||||||
RequiredType: docvalues.PathTypeFile,
|
RequiredType: docvalues.PathTypeFile,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -834,6 +836,7 @@ rsa-sha2-512,rsa-sha2-256
|
|||||||
Documentation: `Specifies a path to a library that will be used when loading any FIDO authenticator-hosted keys, overriding the default of using the built-in USB HID support.
|
Documentation: `Specifies a path to a library that will be used when loading any FIDO authenticator-hosted keys, overriding the default of using the built-in USB HID support.
|
||||||
If the specified value begins with a ‘$’ character, then it will be treated as an environment variable containing the path to the library.`,
|
If the specified value begins with a ‘$’ character, then it will be treated as an environment variable containing the path to the library.`,
|
||||||
Value: docvalues.PathValue{
|
Value: docvalues.PathValue{
|
||||||
|
IsOptional: false,
|
||||||
RequiredType: docvalues.PathTypeFile,
|
RequiredType: docvalues.PathTypeFile,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -963,6 +966,7 @@ rsa-sha2-512,rsa-sha2-256
|
|||||||
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
|
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
|
||||||
RespectQuotes: true,
|
RespectQuotes: true,
|
||||||
SubValue: docvalues.PathValue{
|
SubValue: docvalues.PathValue{
|
||||||
|
IsOptional: true,
|
||||||
RequiredType: docvalues.PathTypeFile,
|
RequiredType: docvalues.PathTypeFile,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -986,6 +990,7 @@ rsa-sha2-512,rsa-sha2-256
|
|||||||
"xauthlocation": {
|
"xauthlocation": {
|
||||||
Documentation: `Specifies the full pathname of the xauth(1) program. The default is /usr/X11R6/bin/xauth.`,
|
Documentation: `Specifies the full pathname of the xauth(1) program. The default is /usr/X11R6/bin/xauth.`,
|
||||||
Value: docvalues.PathValue{
|
Value: docvalues.PathValue{
|
||||||
|
IsOptional: false,
|
||||||
RequiredType: docvalues.PathTypeFile,
|
RequiredType: docvalues.PathTypeFile,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
@ -5,9 +5,9 @@ import (
|
|||||||
sshconfig "config-lsp/handlers/ssh_config"
|
sshconfig "config-lsp/handlers/ssh_config"
|
||||||
"config-lsp/handlers/ssh_config/diagnostics"
|
"config-lsp/handlers/ssh_config/diagnostics"
|
||||||
"config-lsp/handlers/ssh_config/fields"
|
"config-lsp/handlers/ssh_config/fields"
|
||||||
|
"config-lsp/utils"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
|
||||||
"github.com/hbollon/go-edlib"
|
|
||||||
protocol "github.com/tliron/glsp/protocol_3_16"
|
protocol "github.com/tliron/glsp/protocol_3_16"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -24,7 +24,8 @@ func getKeywordTypoFixes(
|
|||||||
if typoOption, found := d.Indexes.UnknownOptions[line]; found {
|
if typoOption, found := d.Indexes.UnknownOptions[line]; found {
|
||||||
name := typoOption.Option.Key.Value.Value
|
name := typoOption.Option.Key.Value.Value
|
||||||
|
|
||||||
suggestedOptions := findSimilarOptions(name)
|
opts := utils.KeysOfMap(fields.Options)
|
||||||
|
suggestedOptions := common.FindSimilarItems(fields.CreateNormalizedName(name), opts)
|
||||||
|
|
||||||
actions := make([]protocol.CodeAction, 0, len(suggestedOptions))
|
actions := make([]protocol.CodeAction, 0, len(suggestedOptions))
|
||||||
|
|
||||||
@ -61,43 +62,3 @@ func getKeywordTypoFixes(
|
|||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Find options that are similar to the given option name.
|
|
||||||
// This is used to find typos & suggest the correct option name.
|
|
||||||
// Once an option is found that has a Damerau-Levenshtein distance of 1, it is immediately returned.
|
|
||||||
// If not, then the next 2 options of similarity 2, or 3 options of similarity 3 are returned.
|
|
||||||
// If no options with similarity <= 3 are found, then an empty slice is returned.
|
|
||||||
func findSimilarOptions(
|
|
||||||
optionName string,
|
|
||||||
) []fields.NormalizedOptionName {
|
|
||||||
normalizedOptionName := string(fields.CreateNormalizedName(optionName))
|
|
||||||
|
|
||||||
optionsPerSimilarity := map[uint8][]fields.NormalizedOptionName{
|
|
||||||
2: make([]fields.NormalizedOptionName, 0, 2),
|
|
||||||
3: make([]fields.NormalizedOptionName, 0, 3),
|
|
||||||
}
|
|
||||||
|
|
||||||
for name := range fields.Options {
|
|
||||||
normalizedName := string(name)
|
|
||||||
similarity := edlib.DamerauLevenshteinDistance(normalizedName, normalizedOptionName)
|
|
||||||
|
|
||||||
switch similarity {
|
|
||||||
case 1:
|
|
||||||
return []fields.NormalizedOptionName{name}
|
|
||||||
case 2:
|
|
||||||
optionsPerSimilarity[2] = append(optionsPerSimilarity[2], name)
|
|
||||||
|
|
||||||
if len(optionsPerSimilarity[2]) >= 2 {
|
|
||||||
return optionsPerSimilarity[2]
|
|
||||||
}
|
|
||||||
case 3:
|
|
||||||
optionsPerSimilarity[3] = append(optionsPerSimilarity[3], name)
|
|
||||||
|
|
||||||
if len(optionsPerSimilarity[3]) >= 3 {
|
|
||||||
return optionsPerSimilarity[3]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return append(optionsPerSimilarity[2], optionsPerSimilarity[3]...)
|
|
||||||
}
|
|
||||||
|
@ -20,7 +20,6 @@ func TextDocumentDidChange(
|
|||||||
document := sshconfig.DocumentParserMap[params.TextDocument.URI]
|
document := sshconfig.DocumentParserMap[params.TextDocument.URI]
|
||||||
document.Config.Clear()
|
document.Config.Clear()
|
||||||
|
|
||||||
println("reparsing everything")
|
|
||||||
diagnostics := make([]protocol.Diagnostic, 0)
|
diagnostics := make([]protocol.Diagnostic, 0)
|
||||||
errors := document.Config.Parse(content)
|
errors := document.Config.Parse(content)
|
||||||
|
|
||||||
|
@ -1,4 +1,4 @@
-// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match

@ -1,4 +1,4 @@
-// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser

@ -1,4 +1,4 @@
-// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match

@ -1,4 +1,4 @@
-// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match

@ -1,4 +1,4 @@
-// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Config

@ -1,4 +1,4 @@
-// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser

@ -1,4 +1,4 @@
-// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Config

@ -1,4 +1,4 @@
-// Code generated from Config.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Config.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Config
|
|
||||||
|
@ -83,31 +83,36 @@ See PATTERNS in ssh_config(5) for more information on patterns. This keyword may
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
docvalues.ArrayValue{
|
docvalues.ArrayValue{
|
||||||
Separator: ",",
|
Separator: " ",
|
||||||
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
|
DuplicatesExtractor: nil,
|
||||||
RespectQuotes: true,
|
RespectQuotes: true,
|
||||||
SubValue: docvalues.EnumValue{
|
SubValue: docvalues.ArrayValue{
|
||||||
EnforceValues: true,
|
Separator: ",",
|
||||||
Values: []docvalues.EnumString{
|
DuplicatesExtractor: &docvalues.SimpleDuplicatesExtractor,
|
||||||
docvalues.CreateEnumString("none"),
|
RespectQuotes: true,
|
||||||
|
SubValue: docvalues.EnumValue{
|
||||||
|
EnforceValues: true,
|
||||||
|
Values: []docvalues.EnumString{
|
||||||
|
docvalues.CreateEnumString("none"),
|
||||||
|
|
||||||
docvalues.CreateEnumString("password"),
|
docvalues.CreateEnumString("password"),
|
||||||
docvalues.CreateEnumString("publickey"),
|
docvalues.CreateEnumString("publickey"),
|
||||||
docvalues.CreateEnumString("gssapi-with-mic"),
|
docvalues.CreateEnumString("gssapi-with-mic"),
|
||||||
docvalues.CreateEnumString("keyboard-interactive"),
|
docvalues.CreateEnumString("keyboard-interactive"),
|
||||||
docvalues.CreateEnumString("hostbased"),
|
docvalues.CreateEnumString("hostbased"),
|
||||||
|
|
||||||
docvalues.CreateEnumString("password:bsdauth"),
|
docvalues.CreateEnumString("password:bsdauth"),
|
||||||
docvalues.CreateEnumString("publickey:bsdauth"),
|
docvalues.CreateEnumString("publickey:bsdauth"),
|
||||||
docvalues.CreateEnumString("gssapi-with-mic:bsdauth"),
|
docvalues.CreateEnumString("gssapi-with-mic:bsdauth"),
|
||||||
docvalues.CreateEnumString("keyboard-interactive:bsdauth"),
|
docvalues.CreateEnumString("keyboard-interactive:bsdauth"),
|
||||||
docvalues.CreateEnumString("hostbased:bsdauth"),
|
docvalues.CreateEnumString("hostbased:bsdauth"),
|
||||||
|
|
||||||
docvalues.CreateEnumString("password:pam"),
|
docvalues.CreateEnumString("password:pam"),
|
||||||
docvalues.CreateEnumString("publickey:pam"),
|
docvalues.CreateEnumString("publickey:pam"),
|
||||||
docvalues.CreateEnumString("gssapi-with-mic:pam"),
|
docvalues.CreateEnumString("gssapi-with-mic:pam"),
|
||||||
docvalues.CreateEnumString("keyboard-interactive:pam"),
|
docvalues.CreateEnumString("keyboard-interactive:pam"),
|
||||||
docvalues.CreateEnumString("hostbased:pam"),
|
docvalues.CreateEnumString("hostbased:pam"),
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
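The nesting above makes the outer list space-separated and each inner list comma-separated, which matches how sshd_config(5) defines AuthenticationMethods: every space-separated entry is an alternative, and each alternative is a comma-separated sequence of methods that must all succeed. An illustrative value (not taken from this change):

	// Illustrative sshd_config line the nested ArrayValue is meant to accept:
	//   AuthenticationMethods publickey,password publickey,keyboard-interactive
	const exampleAuthenticationMethods = "publickey,password publickey,keyboard-interactive"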
@ -146,14 +151,26 @@ See PATTERNS in ssh_config(5) for more information on patterns. This keyword may
|
|||||||
Documentation: `Specifies a file that lists principal names that are accepted for certificate authentication. When using certificates signed by a key listed in TrustedUserCAKeys, this file lists names, one of which must appear in the certificate for it to be accepted for authentication. Names are listed one per line preceded by key options (as described in “AUTHORIZED_KEYS FILE FORMAT” in sshd(8)). Empty lines and comments starting with ‘#’ are ignored.
|
Documentation: `Specifies a file that lists principal names that are accepted for certificate authentication. When using certificates signed by a key listed in TrustedUserCAKeys, this file lists names, one of which must appear in the certificate for it to be accepted for authentication. Names are listed one per line preceded by key options (as described in “AUTHORIZED_KEYS FILE FORMAT” in sshd(8)). Empty lines and comments starting with ‘#’ are ignored.
|
||||||
Arguments to AuthorizedPrincipalsFile accept the tokens described in the “TOKENS” section. After expansion, AuthorizedPrincipalsFile is taken to be an absolute path or one relative to the user's home directory. The default is none, i.e. not to use a principals file – in this case, the username of the user must appear in a certificate's principals list for it to be accepted.
|
Arguments to AuthorizedPrincipalsFile accept the tokens described in the “TOKENS” section. After expansion, AuthorizedPrincipalsFile is taken to be an absolute path or one relative to the user's home directory. The default is none, i.e. not to use a principals file – in this case, the username of the user must appear in a certificate's principals list for it to be accepted.
|
||||||
Note that AuthorizedPrincipalsFile is only used when authentication proceeds using a CA listed in TrustedUserCAKeys and is not consulted for certification authorities trusted via ~/.ssh/authorized_keys, though the principals= key option offers a similar facility (see sshd(8) for details).`,
|
Note that AuthorizedPrincipalsFile is only used when authentication proceeds using a CA listed in TrustedUserCAKeys and is not consulted for certification authorities trusted via ~/.ssh/authorized_keys, though the principals= key option offers a similar facility (see sshd(8) for details).`,
|
||||||
Value: docvalues.PathValue{
|
Value: docvalues.OrValue{
|
||||||
RequiredType: docvalues.PathTypeFile,
|
Values: []docvalues.DeprecatedValue{
|
||||||
|
docvalues.SingleEnumValue("none"),
|
||||||
|
docvalues.PathValue{
|
||||||
|
IsOptional: false,
|
||||||
|
RequiredType: docvalues.PathTypeFile,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"banner": {
|
"banner": {
|
||||||
Documentation: `The contents of the specified file are sent to the remote user before authentication is allowed. If the argument is none then no banner is displayed. By default, no banner is displayed.`,
|
Documentation: `The contents of the specified file are sent to the remote user before authentication is allowed. If the argument is none then no banner is displayed. By default, no banner is displayed.`,
|
||||||
Value: docvalues.PathValue{
|
Value: docvalues.OrValue{
|
||||||
RequiredType: docvalues.PathTypeFile,
|
Values: []docvalues.DeprecatedValue{
|
||||||
|
docvalues.SingleEnumValue("none"),
|
||||||
|
docvalues.PathValue{
|
||||||
|
IsOptional: false,
|
||||||
|
RequiredType: docvalues.PathTypeFile,
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
"casignaturealgorithms": {
|
"casignaturealgorithms": {
|
||||||
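Both options now validate either the literal none or a file path, in line with the documentation quoted above ("The default is none"; "If the argument is none then no banner is displayed"). Illustrative values the OrValue is meant to cover; the concrete path is an assumption, not taken from this change:

	// Both forms should be accepted by the OrValue above:
	//   Banner none
	//   Banner /etc/issue.net
	const exampleBannerNone = "none"
	const exampleBannerPath = "/etc/issue.net"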
@ -343,13 +360,19 @@ See PATTERNS in ssh_config(5) for more information on patterns. This keyword may
|
|||||||
},
|
},
|
||||||
"hostcertificate": {
|
"hostcertificate": {
|
||||||
Documentation: `Specifies a file containing a public host certificate. The certificate's public key must match a private host key already specified by HostKey. The default behaviour of sshd(8) is not to load any certificates.`,
|
Documentation: `Specifies a file containing a public host certificate. The certificate's public key must match a private host key already specified by HostKey. The default behaviour of sshd(8) is not to load any certificates.`,
|
||||||
Value: docvalues.PathValue{},
|
Value: docvalues.PathValue{
|
||||||
|
IsOptional: true,
|
||||||
|
RequiredType: docvalues.PathTypeFile,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
"hostkey": {
|
"hostkey": {
|
||||||
Documentation: `Specifies a file containing a private host key used by SSH. The defaults are /etc/ssh/ssh_host_ecdsa_key, /etc/ssh/ssh_host_ed25519_key and /etc/ssh/ssh_host_rsa_key.
|
Documentation: `Specifies a file containing a private host key used by SSH. The defaults are /etc/ssh/ssh_host_ecdsa_key, /etc/ssh/ssh_host_ed25519_key and /etc/ssh/ssh_host_rsa_key.
|
||||||
Note that sshd(8) will refuse to use a file if it is group/world-accessible and that the HostKeyAlgorithms option restricts which of the keys are actually used by sshd(8).
|
Note that sshd(8) will refuse to use a file if it is group/world-accessible and that the HostKeyAlgorithms option restricts which of the keys are actually used by sshd(8).
|
||||||
It is possible to have multiple host key files. It is also possible to specify public host key files instead. In this case operations on the private key will be delegated to an ssh-agent(1).`,
|
It is possible to have multiple host key files. It is also possible to specify public host key files instead. In this case operations on the private key will be delegated to an ssh-agent(1).`,
|
||||||
Value: docvalues.PathValue{},
|
Value: docvalues.PathValue{
|
||||||
|
IsOptional: false,
|
||||||
|
RequiredType: docvalues.PathTypeFile,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
"hostkeyagent": {
|
"hostkeyagent": {
|
||||||
Documentation: `Identifies the UNIX-domain socket used to communicate with an agent that has access to the private host keys. If the string "SSH_AUTH_SOCK" is specified, the location of the socket will be read from the SSH_AUTH_SOCK environment variable.`,
|
Documentation: `Identifies the UNIX-domain socket used to communicate with an agent that has access to the private host keys. If the string "SSH_AUTH_SOCK" is specified, the location of the socket will be read from the SSH_AUTH_SOCK environment variable.`,
|
||||||
@ -592,8 +615,9 @@ Only a subset of keywords may be used on the lines following a Match keyword. Av
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
"modulifile": {
|
"modulifile": {
|
||||||
Documentation: `Specifies the moduli(5) file that contains the Diffie- Hellman groups used for the “diffie-hellman-group-exchange-sha1” and “diffie-hellman-group-exchange-sha256” key exchange methods. The default is /etc/moduli.`,
|
Documentation: `Specifies the moduli(5) file that contains the Diffie-Hellman groups used for the “diffie-hellman-group-exchange-sha1” and “diffie-hellman-group-exchange-sha256” key exchange methods. The default is /etc/moduli.`,
|
||||||
Value: docvalues.PathValue{
|
Value: docvalues.PathValue{
|
||||||
|
IsOptional: false,
|
||||||
RequiredType: docvalues.PathTypeFile,
|
RequiredType: docvalues.PathTypeFile,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@ -859,6 +883,7 @@ Only a subset of keywords may be used on the lines following a Match keyword. Av
|
|||||||
"securitykeyprovider": {
|
"securitykeyprovider": {
|
||||||
Documentation: `Specifies a path to a library that will be used when loading FIDO authenticator-hosted keys, overriding the default of using the built-in USB HID support.`,
|
Documentation: `Specifies a path to a library that will be used when loading FIDO authenticator-hosted keys, overriding the default of using the built-in USB HID support.`,
|
||||||
Value: docvalues.PathValue{
|
Value: docvalues.PathValue{
|
||||||
|
IsOptional: false,
|
||||||
RequiredType: docvalues.PathTypeFile,
|
RequiredType: docvalues.PathTypeFile,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
@ -8,4 +8,5 @@ var AllowedDuplicateOptions = map[NormalizedOptionName]struct{}{
|
|||||||
"listenaddress": {},
|
"listenaddress": {},
|
||||||
"match": {},
|
"match": {},
|
||||||
"port": {},
|
"port": {},
|
||||||
|
"hostkey": {},
|
||||||
}
|
}
|
||||||
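Allowing hostkey to repeat matches the HostKey documentation above ("It is possible to have multiple host key files"), so configurations listing several keys are no longer flagged as duplicates. An illustrative set of values, using paths quoted in that documentation:

	// Typical sshd_config with more than one HostKey directive:
	//   HostKey /etc/ssh/ssh_host_ed25519_key
	//   HostKey /etc/ssh/ssh_host_rsa_key
	var exampleHostKeys = []string{
		"/etc/ssh/ssh_host_ed25519_key",
		"/etc/ssh/ssh_host_rsa_key",
	}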
|
@ -18,6 +18,10 @@ func GetRootCompletions(
|
|||||||
parentMatchBlock *ast.SSHDMatchBlock,
|
parentMatchBlock *ast.SSHDMatchBlock,
|
||||||
suggestValue bool,
|
suggestValue bool,
|
||||||
) ([]protocol.CompletionItem, error) {
|
) ([]protocol.CompletionItem, error) {
|
||||||
|
if d.Indexes == nil {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
kind := protocol.CompletionItemKindField
|
kind := protocol.CompletionItemKindField
|
||||||
|
|
||||||
availableOptions := make(map[fields.NormalizedOptionName]docvalues.DocumentationValue, 0)
|
availableOptions := make(map[fields.NormalizedOptionName]docvalues.DocumentationValue, 0)
|
||||||
|
@ -1,4 +1,4 @@
-// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match

@ -1,4 +1,4 @@
-// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser

@ -1,4 +1,4 @@
-// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match

@ -1,4 +1,4 @@
-// Code generated from Match.g4 by ANTLR 4.13.0. DO NOT EDIT.
+// Code generated from Match.g4 by ANTLR 4.13.2. DO NOT EDIT.

package parser // Match
|
|
||||||
|
@ -35,6 +35,12 @@ func Analyze(
|
|||||||
|
|
||||||
d.Indexes = i
|
d.Indexes = i
|
||||||
|
|
||||||
|
analyzeProperties(ctx)
|
||||||
|
|
||||||
|
if len(ctx.diagnostics) > 0 {
|
||||||
|
return ctx.diagnostics
|
||||||
|
}
|
||||||
|
|
||||||
analyzeInterfaceSection(ctx)
|
analyzeInterfaceSection(ctx)
|
||||||
analyzeDNSPropertyContainsFallback(ctx)
|
analyzeDNSPropertyContainsFallback(ctx)
|
||||||
analyzeKeepAlivePropertyIsSet(ctx)
|
analyzeKeepAlivePropertyIsSet(ctx)
|
||||||
|
89
server/handlers/wireguard/analyzer/properties.go
Normal file
@ -0,0 +1,89 @@
|
|||||||
|
package analyzer
|
||||||
|
|
||||||
|
import (
|
||||||
|
"config-lsp/common"
|
||||||
|
docvalues "config-lsp/doc-values"
|
||||||
|
"config-lsp/handlers/wireguard/ast"
|
||||||
|
"config-lsp/handlers/wireguard/diagnostics"
|
||||||
|
"config-lsp/handlers/wireguard/fields"
|
||||||
|
"config-lsp/handlers/wireguard/indexes"
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
protocol "github.com/tliron/glsp/protocol_3_16"
|
||||||
|
)
|
||||||
|
|
||||||
|
func analyzeProperties(
|
||||||
|
ctx *analyzerContext,
|
||||||
|
) {
|
||||||
|
for _, section := range ctx.document.Config.Sections {
|
||||||
|
normalizedHeaderName := fields.CreateNormalizedName(section.Header.Name)
|
||||||
|
|
||||||
|
// Whether to check if the property is allowed in the section
|
||||||
|
checkAllowedProperty := true
|
||||||
|
existingProperties := make(map[fields.NormalizedName]*ast.WGProperty)
|
||||||
|
|
||||||
|
it := section.Properties.Iterator()
|
||||||
|
for it.Next() {
|
||||||
|
property := it.Value().(*ast.WGProperty)
|
||||||
|
normalizedPropertyName := fields.CreateNormalizedName(property.Key.Name)
|
||||||
|
|
||||||
|
if property.Key.Name == "" {
|
||||||
|
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||||
|
Message: "This property is missing a name",
|
||||||
|
Range: property.Key.ToLSPRange(),
|
||||||
|
Severity: &common.SeverityError,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if property.Value == nil || property.Value.Value == "" {
|
||||||
|
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||||
|
Message: "This property is missing a value",
|
||||||
|
Range: property.ToLSPRange(),
|
||||||
|
Severity: &common.SeverityError,
|
||||||
|
})
|
||||||
|
checkAllowedProperty = false
|
||||||
|
}
|
||||||
|
|
||||||
|
if checkAllowedProperty {
|
||||||
|
availableOptions := fields.OptionsHeaderMap[normalizedHeaderName]
|
||||||
|
|
||||||
|
// Duplicate check
|
||||||
|
if existingProperty, found := existingProperties[normalizedPropertyName]; found {
|
||||||
|
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||||
|
Message: fmt.Sprintf("Property '%s' has already been defined on line %d", property.Key.Name, existingProperty.Start.Line+1),
|
||||||
|
Severity: &common.SeverityError,
|
||||||
|
Range: existingProperty.ToLSPRange(),
|
||||||
|
})
|
||||||
|
// Check if value is valid
|
||||||
|
} else if option, found := availableOptions[normalizedPropertyName]; found {
|
||||||
|
invalidValues := option.DeprecatedCheckIsValid(property.Value.Value)
|
||||||
|
|
||||||
|
for _, invalidValue := range invalidValues {
|
||||||
|
err := docvalues.LSPErrorFromInvalidValue(property.Start.Line, *invalidValue).ShiftCharacter(property.Value.Start.Character)
|
||||||
|
|
||||||
|
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||||
|
Range: err.Range.ToLSPRange(),
|
||||||
|
Message: err.Err.Error(),
|
||||||
|
Severity: &common.SeverityError,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
// Unknown property
|
||||||
|
} else {
|
||||||
|
ctx.diagnostics = append(ctx.diagnostics,
|
||||||
|
diagnostics.GenerateUnknownOption(
|
||||||
|
property.ToLSPRange(),
|
||||||
|
property.Key.Name,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
ctx.document.Indexes.UnknownProperties[property.Key.Start.Line] = indexes.WGIndexPropertyInfo{
|
||||||
|
Section: section,
|
||||||
|
Property: property,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
existingProperties[normalizedPropertyName] = property
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -22,7 +22,7 @@ func analyzeDNSPropertyContainsFallback(
|
|||||||
|
|
||||||
interfaceSection := sections[0]
|
interfaceSection := sections[0]
|
||||||
|
|
||||||
property := interfaceSection.FindFirstPropertyByName("DNS")
|
_, property := interfaceSection.FindFirstPropertyByName("DNS")
|
||||||
|
|
||||||
if property == nil {
|
if property == nil {
|
||||||
return
|
return
|
||||||
@ -44,7 +44,10 @@ func analyzeKeepAlivePropertyIsSet(
|
|||||||
) {
|
) {
|
||||||
for _, section := range ctx.document.Indexes.SectionsByName["Peer"] {
|
for _, section := range ctx.document.Indexes.SectionsByName["Peer"] {
|
||||||
// If an endpoint is set, then we should only check for the keepalive property
|
// If an endpoint is set, then we should only check for the keepalive property
|
||||||
if section.FindFirstPropertyByName("Endpoint") != nil && section.FindFirstPropertyByName("PersistentKeepalive") == nil {
|
_, endpoint := section.FindFirstPropertyByName("Endpoint")
|
||||||
|
_, persistentKeepAlive := section.FindFirstPropertyByName("PersistentKeepalive")
|
||||||
|
|
||||||
|
if endpoint != nil && persistentKeepAlive == nil {
|
||||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||||
Message: "PersistentKeepalive is not set. It is recommended to set this property, as it helps to maintain the connection when users are behind NAT",
|
Message: "PersistentKeepalive is not set. It is recommended to set this property, as it helps to maintain the connection when users are behind NAT",
|
||||||
Severity: &common.SeverityHint,
|
Severity: &common.SeverityHint,
|
||||||
@ -58,11 +61,11 @@ func analyzeSymmetricPropertiesSet(
|
|||||||
ctx *analyzerContext,
|
ctx *analyzerContext,
|
||||||
) {
|
) {
|
||||||
for _, section := range ctx.document.Indexes.SectionsByName["Interface"] {
|
for _, section := range ctx.document.Indexes.SectionsByName["Interface"] {
|
||||||
preUpProperty := section.FindFirstPropertyByName("PreUp")
|
_, preUpProperty := section.FindFirstPropertyByName("PreUp")
|
||||||
preDownProperty := section.FindFirstPropertyByName("PreDown")
|
_, preDownProperty := section.FindFirstPropertyByName("PreDown")
|
||||||
|
|
||||||
postUpProperty := section.FindFirstPropertyByName("PostUp")
|
_, postUpProperty := section.FindFirstPropertyByName("PostUp")
|
||||||
postDownProperty := section.FindFirstPropertyByName("PostDown")
|
_, postDownProperty := section.FindFirstPropertyByName("PostDown")
|
||||||
|
|
||||||
if preUpProperty != nil && preDownProperty == nil {
|
if preUpProperty != nil && preDownProperty == nil {
|
||||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||||
@ -94,7 +97,7 @@ func analyzeSymmetricPropertiesSet(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type key int
|
type key uint8
|
||||||
|
|
||||||
const (
|
const (
|
||||||
lineKey key = iota
|
lineKey key = iota
|
||||||
@ -109,7 +112,7 @@ func analyzeDuplicateAllowedIPs(
|
|||||||
ipHostSet := utils.CreateIPv4HostSet()
|
ipHostSet := utils.CreateIPv4HostSet()
|
||||||
|
|
||||||
for _, section := range ctx.document.Indexes.SectionsByName["Peer"] {
|
for _, section := range ctx.document.Indexes.SectionsByName["Peer"] {
|
||||||
property := section.FindFirstPropertyByName("AllowedIPs")
|
_, property := section.FindFirstPropertyByName("AllowedIPs")
|
||||||
|
|
||||||
if property == nil {
|
if property == nil {
|
||||||
continue
|
continue
|
||||||
@ -123,12 +126,15 @@ func analyzeDuplicateAllowedIPs(
|
|||||||
}
|
}
|
||||||
|
|
||||||
if ipContext, _ := ipHostSet.ContainsIP(ipAddress); ipContext != nil {
|
if ipContext, _ := ipHostSet.ContainsIP(ipAddress); ipContext != nil {
|
||||||
definedLine := (*ipContext).Value(lineKey).(uint32)
|
ctxx := *ipContext
|
||||||
|
definedLineRaw := ctxx.Value(lineKey)
|
||||||
|
|
||||||
|
definedLine := definedLineRaw.(uint32)
|
||||||
|
|
||||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||||
Message: fmt.Sprintf("This IP range is already covered on line %d", definedLine+1),
|
Message: fmt.Sprintf("This IP range is already covered on line %d", definedLine+1),
|
||||||
Severity: &common.SeverityError,
|
Severity: &common.SeverityError,
|
||||||
Range: property.ToLSPRange(),
|
Range: property.Value.ToLSPRange(),
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
ipContext := context.WithValue(
|
ipContext := context.WithValue(
|
||||||
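The line an IP range was first defined on is carried as a context value keyed by the unexported key type (now uint8), which is why the lookup above goes through ctxx.Value(lineKey) and a uint32 assertion. A minimal self-contained sketch of that pattern, assumed from the surrounding code and not part of this change:

	package example

	import "context"

	type key uint8

	const lineKey key = iota

	// remember stores the line a range was first defined on.
	func remember(line uint32) context.Context {
		return context.WithValue(context.Background(), lineKey, line)
	}

	// recall reads it back; the assertion mirrors definedLineRaw.(uint32) above.
	func recall(ctx context.Context) uint32 {
		return ctx.Value(lineKey).(uint32)
	}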
|
@ -2,8 +2,6 @@ package analyzer
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"config-lsp/common"
|
"config-lsp/common"
|
||||||
docvalues "config-lsp/doc-values"
|
|
||||||
"config-lsp/handlers/wireguard/ast"
|
|
||||||
"config-lsp/handlers/wireguard/fields"
|
"config-lsp/handlers/wireguard/fields"
|
||||||
"config-lsp/utils"
|
"config-lsp/utils"
|
||||||
"fmt"
|
"fmt"
|
||||||
@ -14,8 +12,6 @@ import (
|
|||||||
func analyzeStructureIsValid(ctx *analyzerContext) {
|
func analyzeStructureIsValid(ctx *analyzerContext) {
|
||||||
for _, section := range ctx.document.Config.Sections {
|
for _, section := range ctx.document.Config.Sections {
|
||||||
normalizedHeaderName := fields.CreateNormalizedName(section.Header.Name)
|
normalizedHeaderName := fields.CreateNormalizedName(section.Header.Name)
|
||||||
// Whether to check if the property is allowed in the section
|
|
||||||
checkAllowedProperty := true
|
|
||||||
|
|
||||||
if section.Header.Name == "" {
|
if section.Header.Name == "" {
|
||||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
||||||
@ -29,8 +25,6 @@ func analyzeStructureIsValid(ctx *analyzerContext) {
|
|||||||
Range: section.Header.ToLSPRange(),
|
Range: section.Header.ToLSPRange(),
|
||||||
Severity: &common.SeverityError,
|
Severity: &common.SeverityError,
|
||||||
})
|
})
|
||||||
// Do not check as the section is unknown
|
|
||||||
checkAllowedProperty = false
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if section.Properties.Size() == 0 {
|
if section.Properties.Size() == 0 {
|
||||||
@ -42,66 +36,6 @@ func analyzeStructureIsValid(ctx *analyzerContext) {
|
|||||||
protocol.DiagnosticTagUnnecessary,
|
protocol.DiagnosticTagUnnecessary,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
} else {
|
|
||||||
existingProperties := make(map[fields.NormalizedName]*ast.WGProperty)
|
|
||||||
|
|
||||||
it := section.Properties.Iterator()
|
|
||||||
for it.Next() {
|
|
||||||
property := it.Value().(*ast.WGProperty)
|
|
||||||
normalizedPropertyName := fields.CreateNormalizedName(property.Key.Name)
|
|
||||||
|
|
||||||
if property.Key.Name == "" {
|
|
||||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
|
||||||
Message: "This property is missing a name",
|
|
||||||
Range: property.Key.ToLSPRange(),
|
|
||||||
Severity: &common.SeverityError,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
if property.Value == nil || property.Value.Value == "" {
|
|
||||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
|
||||||
Message: "This property is missing a value",
|
|
||||||
Range: property.ToLSPRange(),
|
|
||||||
Severity: &common.SeverityError,
|
|
||||||
})
|
|
||||||
checkAllowedProperty = false
|
|
||||||
}
|
|
||||||
|
|
||||||
if checkAllowedProperty {
|
|
||||||
availableOptions := fields.OptionsHeaderMap[normalizedHeaderName]
|
|
||||||
|
|
||||||
// Duplicate check
|
|
||||||
if existingProperty, found := existingProperties[normalizedPropertyName]; found {
|
|
||||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
|
||||||
Message: fmt.Sprintf("Property '%s' has already been defined on line %d", property.Key.Name, existingProperty.Start.Line+1),
|
|
||||||
Severity: &common.SeverityError,
|
|
||||||
Range: existingProperty.ToLSPRange(),
|
|
||||||
})
|
|
||||||
// Check if value is valid
|
|
||||||
} else if option, found := availableOptions[normalizedPropertyName]; found {
|
|
||||||
invalidValues := option.DeprecatedCheckIsValid(property.Value.Value)
|
|
||||||
|
|
||||||
for _, invalidValue := range invalidValues {
|
|
||||||
err := docvalues.LSPErrorFromInvalidValue(property.Start.Line, *invalidValue).ShiftCharacter(property.Value.Start.Character)
|
|
||||||
|
|
||||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
|
||||||
Range: err.Range.ToLSPRange(),
|
|
||||||
Message: err.Err.Error(),
|
|
||||||
Severity: &common.SeverityError,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
// Unknown property
|
|
||||||
} else {
|
|
||||||
ctx.diagnostics = append(ctx.diagnostics, protocol.Diagnostic{
|
|
||||||
Message: fmt.Sprintf("Unknown property '%s'", property.Key.Name),
|
|
||||||
Range: property.Key.ToLSPRange(),
|
|
||||||
Severity: &common.SeverityError,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
existingProperties[normalizedPropertyName] = property
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -35,7 +35,7 @@ type WGHeader struct {
|
|||||||
type WGSection struct {
|
type WGSection struct {
|
||||||
common.LocationRange
|
common.LocationRange
|
||||||
Header WGHeader
|
Header WGHeader
|
||||||
// [uint32]WGProperty: line number -> WGProperty
|
// [uint32]*WGProperty: line number -> *WGProperty
|
||||||
Properties *treemap.Map
|
Properties *treemap.Map
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -10,11 +10,11 @@ func (c *WGConfig) FindSectionByLine(line uint32) *WGSection {
|
|||||||
line,
|
line,
|
||||||
func(current *WGSection, target uint32) int {
|
func(current *WGSection, target uint32) int {
|
||||||
if target < current.Start.Line {
|
if target < current.Start.Line {
|
||||||
return -1
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
if target > current.End.Line {
|
if target > current.End.Line {
|
||||||
return 1
|
return -1
|
||||||
}
|
}
|
||||||
|
|
||||||
return 0
|
return 0
|
||||||
@@ -42,28 +42,17 @@ func (c *WGConfig) FindPropertyByLine(line uint32) *WGProperty {
 return nil
 }

-func (s *WGSection) FindFirstPropertyByName(name string) *WGProperty {
+func (s *WGSection) FindFirstPropertyByName(name string) (uint32, *WGProperty) {
 it := s.Properties.Iterator()
 for it.Next() {
+line := it.Key().(uint32)
 property := it.Value().(*WGProperty)
 if property.Key.Name == name {
-return property
+return line, property
 }
 }

-return nil
+return 0, nil
-}
-
-func (s *WGSection) FindPropertyByName(name string) *WGProperty {
-it := s.Properties.Iterator()
-for it.Next() {
-property := it.Value().(*WGProperty)
-if property.Key.Name == name {
-return property
-}
-}
-
-return nil
 }

 func (s *WGSection) GetLastProperty() *WGProperty {
19 server/handlers/wireguard/diagnostics/diagnostics.go (new file)
@@ -0,0 +1,19 @@
+package diagnostics
+
+import (
+"config-lsp/common"
+"fmt"
+
+protocol "github.com/tliron/glsp/protocol_3_16"
+)
+
+func GenerateUnknownOption(
+diagnosticRange protocol.Range,
+propertyName string,
+) protocol.Diagnostic {
+return protocol.Diagnostic{
+Range: diagnosticRange,
+Message: fmt.Sprintf("Unknown property: %s", propertyName),
+Severity: &common.SeverityError,
+}
+}
@@ -1,9 +1,7 @@
 package handlers

 import (
-"config-lsp/handlers/wireguard"
 "config-lsp/handlers/wireguard/ast"
-wgcommands "config-lsp/handlers/wireguard/commands"

 protocol "github.com/tliron/glsp/protocol_3_16"
 )
@@ -13,6 +11,7 @@ type CodeActionName string
 const (
 CodeActionGeneratePrivateKey CodeActionName = "generatePrivateKey"
 CodeActionGeneratePresharedKey CodeActionName = "generatePresharedKey"
+CodeActionCreatePeer CodeActionName = "createPeer"
 )

 type CodeAction interface {
@@ -20,105 +19,3 @@ type CodeAction interface {
 }

 type CodeActionArgs interface{}
-
-type CodeActionGeneratePrivateKeyArgs struct {
-URI protocol.DocumentUri
-Line uint32
-}
-
-func CodeActionGeneratePrivateKeyArgsFromArguments(arguments map[string]any) CodeActionGeneratePrivateKeyArgs {
-return CodeActionGeneratePrivateKeyArgs{
-URI: arguments["URI"].(protocol.DocumentUri),
-Line: uint32(arguments["Line"].(float64)),
-}
-}
-
-func (args CodeActionGeneratePrivateKeyArgs) RunCommand(d *wireguard.WGDocument) (*protocol.ApplyWorkspaceEditParams, error) {
-privateKey, err := wgcommands.CreateNewPrivateKey()
-
-if err != nil {
-return &protocol.ApplyWorkspaceEditParams{}, err
-}
-
-section := d.Config.FindSectionByLine(args.Line)
-property := d.Config.FindPropertyByLine(args.Line)
-
-if section == nil || property == nil {
-return nil, nil
-}
-
-label := "Generate Private Key"
-return &protocol.ApplyWorkspaceEditParams{
-Label: &label,
-Edit: protocol.WorkspaceEdit{
-Changes: map[protocol.DocumentUri][]protocol.TextEdit{
-args.URI: {
-{
-NewText: " " + privateKey,
-Range: protocol.Range{
-Start: protocol.Position{
-Line: property.End.Line,
-Character: property.End.Character,
-},
-End: protocol.Position{
-Line: property.End.Line,
-Character: property.End.Character,
-},
-},
-},
-},
-},
-},
-}, nil
-}
-
-type CodeActionGeneratePresharedKeyArgs struct {
-URI protocol.DocumentUri
-Line uint32
-}
-
-func CodeActionGeneratePresharedKeyArgsFromArguments(arguments map[string]any) CodeActionGeneratePresharedKeyArgs {
-return CodeActionGeneratePresharedKeyArgs{
-URI: arguments["URI"].(protocol.DocumentUri),
-Line: uint32(arguments["Line"].(float64)),
-}
-}
-
-func (args CodeActionGeneratePresharedKeyArgs) RunCommand(d *wireguard.WGDocument) (*protocol.ApplyWorkspaceEditParams, error) {
-presharedKey, err := wgcommands.CreatePresharedKey()
-
-if err != nil {
-return &protocol.ApplyWorkspaceEditParams{}, err
-}
-
-section := d.Config.FindSectionByLine(args.Line)
-property := d.Config.FindPropertyByLine(args.Line)
-
-if section == nil || property == nil {
-return nil, nil
-}
-
-label := "Generate Preshared Key"
-return &protocol.ApplyWorkspaceEditParams{
-Label: &label,
-Edit: protocol.WorkspaceEdit{
-Changes: map[protocol.DocumentUri][]protocol.TextEdit{
-args.URI: {
-{
-NewText: " " + presharedKey,
-Range: protocol.Range{
-Start: protocol.Position{
-Line: property.End.Line,
-Character: property.End.Character,
-},
-End: protocol.Position{
-Line: property.End.Line,
-Character: property.End.Character,
-},
-},
-},
-},
-},
-},
-}, nil
-}
191 server/handlers/wireguard/handlers/code-actions_create-peer.go (new file)
@@ -0,0 +1,191 @@
+package handlers
+
+import (
+"config-lsp/common"
+"config-lsp/handlers/wireguard"
+"config-lsp/handlers/wireguard/ast"
+wgcommands "config-lsp/handlers/wireguard/commands"
+"fmt"
+"net"
+
+protocol "github.com/tliron/glsp/protocol_3_16"
+)
+
+type CodeActionCreatePeerArgs struct {
+URI protocol.DocumentUri
+Line uint32
+}
+
+func CodeActionCreatePeerArgsFromArguments(arguments map[string]any) CodeActionCreatePeerArgs {
+return CodeActionCreatePeerArgs{
+URI: arguments["URI"].(protocol.DocumentUri),
+Line: uint32(arguments["Line"].(float64)),
+}
+}
+
+func (args CodeActionCreatePeerArgs) RunCommand(d *wireguard.WGDocument) (*protocol.ApplyWorkspaceEditParams, error) {
+interfaceSection := d.Indexes.SectionsByName["Interface"][0]
+section := d.Config.FindSectionByLine(args.Line)
+
+label := fmt.Sprintf("Add Peer based on peer on line %d", args.Line)
+
+newSection := section
+
+// IP Address
+ipAddressLine, ipAddress := newSection.FindFirstPropertyByName("AllowedIPs")
+_, address := interfaceSection.FindFirstPropertyByName("Address")
+if ipAddress != nil && address != nil {
+_, network, err := net.ParseCIDR(address.Value.Value)
+
+if err == nil {
+newIPAddress := createNewIP(*network, ipAddress.Value.Value)
+
+valueEnd := common.Location{
+Line: ipAddress.End.Line,
+Character: ipAddress.Value.Start.Character + uint32(len(newIPAddress)) + 1,
+}
+
+newSection.Properties.Put(
+ipAddressLine,
+&ast.WGProperty{
+LocationRange: common.LocationRange{
+Start: ipAddress.Start,
+End: valueEnd,
+},
+Key: ipAddress.Key,
+RawValue: newIPAddress,
+Separator: address.Separator,
+Value: &ast.WGPropertyValue{
+LocationRange: common.LocationRange{
+Start: ipAddress.Value.Start,
+End: valueEnd,
+},
+Value: newIPAddress,
+},
+},
+)
+}
+}
+
+// Preshared Key
+presharedKeyLine, presharedKey := newSection.FindFirstPropertyByName("PresharedKey")
+
+if presharedKey != nil {
+var newKey string
+
+if wgcommands.AreWireguardToolsAvailable() {
+createdKey, err := wgcommands.CreatePresharedKey()
+
+if err == nil {
+newKey = createdKey
+}
+} else {
+newKey = "[preshared key]"
+}
+
+valueEnd := common.Location{
+Line: presharedKey.End.Line,
+Character: presharedKey.Value.Start.Character + uint32(len(newKey)) + 1,
+}
+newSection.Properties.Put(
+presharedKeyLine,
+&ast.WGProperty{
+LocationRange: common.LocationRange{
+Start: presharedKey.Start,
+End: valueEnd,
+},
+Key: presharedKey.Key,
+RawValue: newKey,
+Separator: presharedKey.Separator,
+Value: &ast.WGPropertyValue{
+LocationRange: common.LocationRange{
+Start: presharedKey.Value.Start,
+End: valueEnd,
+},
+Value: newKey,
+},
+},
+)
+}
+
+lastProperty := newSection.GetLastProperty()
+println("last line")
+println(lastProperty.End.Line)
+println(fmt.Sprintf("~~~%s~~~", writeSection(*newSection)))
+newText := writeSection(*newSection)
+return &protocol.ApplyWorkspaceEditParams{
+Label: &label,
+Edit: protocol.WorkspaceEdit{
+Changes: map[protocol.DocumentUri][]protocol.TextEdit{
+args.URI: {
+{
+Range: protocol.Range{
+Start: protocol.Position{
+Line: lastProperty.End.Line,
+Character: lastProperty.End.Character,
+},
+End: protocol.Position{
+Line: lastProperty.End.Line,
+Character: lastProperty.End.Character,
+},
+},
+NewText: newText,
+},
+},
+},
+},
+}, nil
+}
+
+func writeSection(section ast.WGSection) string {
+text := "\n\n"
+
+text += fmt.Sprintf("[%s]\n", section.Header.Name)
+
+it := section.Properties.Iterator()
+for it.Next() {
+property := it.Value().(*ast.WGProperty)
+text += fmt.Sprintf("%s = %s\n", property.Key.Name, property.Value.Value)
+}
+
+return text
+}
+
+// Try incrementing the IP address
+func createNewIP(
+network net.IPNet,
+rawIP string,
+) string {
+parsedIP, _, err := net.ParseCIDR(rawIP)
+parsedIP = parsedIP.To4()
+
+if parsedIP == nil {
+// IPv6 is not supported
+return ""
+}
+
+if err != nil {
+return ""
+}
+
+lastAddress := uint32(network.IP[0])<<24 | uint32(network.IP[1])<<16 | uint32(network.IP[2])<<8 | uint32(network.IP[3])
+
+networkMask, _ := network.Mask.Size()
+for index := range 32 - networkMask {
+lastAddress |= 1 << index
+}
+
+newIP := uint32(parsedIP[0])<<24 | uint32(parsedIP[1])<<16 | uint32(parsedIP[2])<<8 | uint32(parsedIP[3])
+newIP += 1
+
+if newIP >= lastAddress || newIP == 0 {
+// The IP is the last one, which can't be used
+// or even worse, it did a whole overflow
+return ""
+}
+
+// Here, we successfully incremented the IP correctly
+
+// Let's return the formatted IP now.
+return fmt.Sprintf("%d.%d.%d.%d/32", newIP>>24, newIP>>16&0xFF, newIP>>8&0xFF, newIP&0xFF)
+}
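For readers following the bit twiddling in createNewIP above, here is a standalone sketch of the same arithmetic (an assumed equivalent written with encoding/binary, not code from the repository): pack the IPv4 bytes into a uint32, set every host bit to get the last address of the network, and bump the peer address by one.

```go
package main

import (
	"encoding/binary"
	"fmt"
	"net"
)

func main() {
	_, network, _ := net.ParseCIDR("10.0.0.0/24")
	ip, _, _ := net.ParseCIDR("10.0.0.1/32")

	base := binary.BigEndian.Uint32(network.IP.To4()) // 10.0.0.0 -> 0x0A000000
	ones, bits := network.Mask.Size()                 // 24, 32
	last := base | (1<<uint(bits-ones) - 1)           // 10.0.0.255 (broadcast)

	next := binary.BigEndian.Uint32(ip.To4()) + 1 // 10.0.0.2
	if next >= last {
		// Mirrors the guard in createNewIP: the last address is not usable.
		fmt.Println("no free address below the broadcast address")
		return
	}

	out := make(net.IP, 4)
	binary.BigEndian.PutUint32(out, next)
	fmt.Printf("%s/32\n", out) // 10.0.0.2/32
}
```

The tests in the next file pin exactly these cases: 10.0.0.1/32 in a /24 advances to 10.0.0.2/32, while 10.0.0.254/32 and 10.0.0.255/32 yield an empty result.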
@@ -0,0 +1,42 @@
+package handlers
+
+import (
+"net"
+"testing"
+)
+
+func TestCreateNewIPSimple24Mask(t *testing.T) {
+_, network, _ := net.ParseCIDR("10.0.0.0/24")
+newIP := createNewIP(*network, "10.0.0.1/32")
+
+if newIP != "10.0.0.2/32" {
+t.Errorf("Expected 10.0.0.2/32, got %s", newIP)
+}
+}
+
+func TestCreateNewIPDoesNotWorkWithLast24Mask(t *testing.T) {
+_, network, _ := net.ParseCIDR("10.0.0.0/24")
+newIP := createNewIP(*network, "10.0.0.254/32")
+
+if newIP != "" {
+t.Errorf("Expected empty string, got %s", newIP)
+}
+}
+
+func TestCreateNewIPDoesNotWorkWithLast24Mask2(t *testing.T) {
+_, network, _ := net.ParseCIDR("10.0.0.0/24")
+newIP := createNewIP(*network, "10.0.0.255/32")
+
+if newIP != "" {
+t.Errorf("Expected empty string, got %s", newIP)
+}
+}
+
+func TestCreateNewIPComplex20Mask(t *testing.T) {
+_, network, _ := net.ParseCIDR("10.0.0.0/20")
+newIP := createNewIP(*network, "10.0.0.255/32")
+
+if newIP != "10.0.1.0/32" {
+t.Errorf("Expected 10.0.1.0/32, got %s", newIP)
+}
+}
@@ -0,0 +1,59 @@
+package handlers
+
+import (
+"config-lsp/handlers/wireguard"
+wgcommands "config-lsp/handlers/wireguard/commands"
+
+protocol "github.com/tliron/glsp/protocol_3_16"
+)
+
+type CodeActionGeneratePrivateKeyArgs struct {
+URI protocol.DocumentUri
+Line uint32
+}
+
+func CodeActionGeneratePrivateKeyArgsFromArguments(arguments map[string]any) CodeActionGeneratePrivateKeyArgs {
+return CodeActionGeneratePrivateKeyArgs{
+URI: arguments["URI"].(protocol.DocumentUri),
+Line: uint32(arguments["Line"].(float64)),
+}
+}
+
+func (args CodeActionGeneratePrivateKeyArgs) RunCommand(d *wireguard.WGDocument) (*protocol.ApplyWorkspaceEditParams, error) {
+privateKey, err := wgcommands.CreateNewPrivateKey()
+
+if err != nil {
+return &protocol.ApplyWorkspaceEditParams{}, err
+}
+
+section := d.Config.FindSectionByLine(args.Line)
+property := d.Config.FindPropertyByLine(args.Line)
+
+if section == nil || property == nil {
+return nil, nil
+}
+
+label := "Generate Private Key"
+return &protocol.ApplyWorkspaceEditParams{
+Label: &label,
+Edit: protocol.WorkspaceEdit{
+Changes: map[protocol.DocumentUri][]protocol.TextEdit{
+args.URI: {
+{
+NewText: " " + privateKey,
+Range: protocol.Range{
+Start: protocol.Position{
+Line: property.End.Line,
+Character: property.End.Character,
+},
+End: protocol.Position{
+Line: property.End.Line,
+Character: property.End.Character,
+},
+},
+},
+},
+},
+},
+}, nil
+}
@@ -0,0 +1,50 @@
+package handlers
+
+import (
+"config-lsp/handlers/wireguard"
+wgcommands "config-lsp/handlers/wireguard/commands"
+
+protocol "github.com/tliron/glsp/protocol_3_16"
+)
+
+type CodeActionGeneratePresharedKeyArgs struct {
+URI protocol.DocumentUri
+Line uint32
+}
+
+func CodeActionGeneratePresharedKeyArgsFromArguments(arguments map[string]any) CodeActionGeneratePresharedKeyArgs {
+return CodeActionGeneratePresharedKeyArgs{
+URI: arguments["URI"].(protocol.DocumentUri),
+Line: uint32(arguments["Line"].(float64)),
+}
+}
+
+func (args CodeActionGeneratePresharedKeyArgs) RunCommand(d *wireguard.WGDocument) (*protocol.ApplyWorkspaceEditParams, error) {
+presharedKey, err := wgcommands.CreatePresharedKey()
+
+if err != nil {
+return &protocol.ApplyWorkspaceEditParams{}, err
+}
+
+section := d.Config.FindSectionByLine(args.Line)
+property := d.Config.FindPropertyByLine(args.Line)
+
+if section == nil || property == nil {
+return nil, nil
+}
+
+label := "Generate Preshared Key"
+return &protocol.ApplyWorkspaceEditParams{
+Label: &label,
+Edit: protocol.WorkspaceEdit{
+Changes: map[protocol.DocumentUri][]protocol.TextEdit{
+args.URI: {
+{
+NewText: presharedKey,
+Range: property.Value.ToLSPRange(),
+},
+},
+},
+},
+}, nil
+}
@@ -0,0 +1,42 @@
+package handlers
+
+import (
+"config-lsp/handlers/wireguard"
+
+protocol "github.com/tliron/glsp/protocol_3_16"
+)
+
+func GetAddPeerLikeThis(
+d *wireguard.WGDocument,
+params *protocol.CodeActionParams,
+) []protocol.CodeAction {
+// First, check if is on peer line
+
+line := params.Range.Start.Line
+
+section := d.Config.FindSectionByLine(line)
+
+// Check if section can be copied
+if section == nil || section.Start.Line != line || section.Header.Name != "Peer" {
+return nil
+}
+
+// Then add option
+commandID := "wireguard." + CodeActionCreatePeer
+command := protocol.Command{
+Title: "Create new Peer based on this one",
+Command: string(commandID),
+Arguments: []any{
+CodeActionCreatePeerArgs{
+URI: params.TextDocument.URI,
+Line: line,
+},
+},
+}
+return []protocol.CodeAction{
+{
+Title: "Create new Peer based on this one",
+Command: &command,
+},
+}
+}
@@ -0,0 +1,59 @@
+package handlers
+
+import (
+"config-lsp/handlers/wireguard"
+
+protocol "github.com/tliron/glsp/protocol_3_16"
+)
+
+func GetKeepaliveCodeActions(
+d *wireguard.WGDocument,
+params *protocol.CodeActionParams,
+) []protocol.CodeAction {
+line := params.Range.Start.Line
+
+for _, section := range d.Indexes.SectionsByName["Peer"] {
+if section.Start.Line >= line && line <= section.End.Line {
+_, endpoint := section.FindFirstPropertyByName("Endpoint")
+_, persistentKeepAlive := section.FindFirstPropertyByName("PersistentKeepalive")
+
+if endpoint != nil && persistentKeepAlive == nil {
+var insertionLine uint32
+lastProperty := section.GetLastProperty()
+
+if lastProperty == nil {
+insertionLine = section.End.Line
+} else {
+insertionLine = lastProperty.End.Line + 1
+}
+
+return []protocol.CodeAction{
+{
+Title: "Add PersistentKeepalive",
+Edit: &protocol.WorkspaceEdit{
+Changes: map[protocol.DocumentUri][]protocol.TextEdit{
+params.TextDocument.URI: {
+{
+Range: protocol.Range{
+Start: protocol.Position{
+Line: insertionLine,
+Character: 0,
+},
+End: protocol.Position{
+Line: insertionLine,
+Character: 0,
+},
+},
+NewText: "PersistentKeepalive = 25\n",
+},
+},
+},
+},
+},
+}
+}
+}
+}
+
+return nil
+}
@@ -7,55 +7,6 @@ import (
 protocol "github.com/tliron/glsp/protocol_3_16"
 )

-func GetKeepaliveCodeActions(
-d *wireguard.WGDocument,
-params *protocol.CodeActionParams,
-) []protocol.CodeAction {
-line := params.Range.Start.Line
-
-for _, section := range d.Indexes.SectionsByName["Peer"] {
-if section.Start.Line >= line && line <= section.End.Line {
-if section.FindPropertyByName("Endpoint") != nil && section.FindFirstPropertyByName("PersistentKeepalive") == nil {
-var insertionLine uint32
-lastProperty := section.GetLastProperty()
-
-if lastProperty == nil {
-insertionLine = section.End.Line
-} else {
-insertionLine = lastProperty.End.Line + 1
-}
-
-return []protocol.CodeAction{
-{
-Title: "Add PersistentKeepalive",
-Edit: &protocol.WorkspaceEdit{
-Changes: map[protocol.DocumentUri][]protocol.TextEdit{
-params.TextDocument.URI: {
-{
-Range: protocol.Range{
-Start: protocol.Position{
-Line: insertionLine,
-Character: 0,
-},
-End: protocol.Position{
-Line: insertionLine,
-Character: 0,
-},
-},
-NewText: "PersistentKeepalive = 25\n",
-},
-},
-},
-},
-},
-}
-}
-}
-}
-
-return nil
-}
-
 func GetKeyGenerationCodeActions(
 d *wireguard.WGDocument,
 params *protocol.CodeActionParams,
@@ -0,0 +1,66 @@
+package handlers
+
+import (
+"config-lsp/common"
+"config-lsp/handlers/wireguard"
+"config-lsp/handlers/wireguard/diagnostics"
+"config-lsp/handlers/wireguard/fields"
+"config-lsp/utils"
+"fmt"
+
+protocol "github.com/tliron/glsp/protocol_3_16"
+)
+
+func GetPropertyKeywordTypoFixes(
+d *wireguard.WGDocument,
+params *protocol.CodeActionParams,
+) []protocol.CodeAction {
+if common.ServerOptions.NoTypoSuggestions {
+return nil
+}
+
+line := params.Range.Start.Line
+
+if typoInfo, found := d.Indexes.UnknownProperties[line]; found {
+if options, found := fields.OptionsHeaderMap[fields.CreateNormalizedName(typoInfo.Section.Header.Name)]; found {
+normalizedPropertyKey := fields.CreateNormalizedName(typoInfo.Property.Key.Name)
+opts := utils.KeysOfMap(options)
+
+suggestedProperties := common.FindSimilarItems(normalizedPropertyKey, opts)
+
+actions := make([]protocol.CodeAction, 0, len(suggestedProperties))
+
+kind := protocol.CodeActionKindQuickFix
+for index, normalizedPropertyName := range suggestedProperties {
+isPreferred := index == 0
+optionName := fields.AllOptionsFormatted[normalizedPropertyName]
+
+actions = append(actions, protocol.CodeAction{
+Title: fmt.Sprintf("Typo Fix: %s", optionName),
+IsPreferred: &isPreferred,
+Kind: &kind,
+Diagnostics: []protocol.Diagnostic{
+diagnostics.GenerateUnknownOption(
+typoInfo.Property.ToLSPRange(),
+typoInfo.Property.Key.Name,
+),
+},
+Edit: &protocol.WorkspaceEdit{
+Changes: map[protocol.DocumentUri][]protocol.TextEdit{
+params.TextDocument.URI: {
+{
+Range: typoInfo.Property.Key.ToLSPRange(),
+NewText: optionName,
+},
+},
+},
+},
+})
+}
+
+return actions
+}
+}
+
+return nil
+}
@@ -2,7 +2,15 @@ package indexes

 import "config-lsp/handlers/wireguard/ast"

-type WGIndexes struct {
-// map of: section name -> WGSection
-SectionsByName map[string][]*ast.WGSection
+type WGIndexPropertyInfo struct {
+Section *ast.WGSection
+Property *ast.WGProperty
+}
+
+type WGIndexes struct {
+// map of: section name -> *WGSection
+SectionsByName map[string][]*ast.WGSection
+
+// map of: line number -> *WGIndexPropertyInfo
+UnknownProperties map[uint32]WGIndexPropertyInfo
 }
@@ -8,7 +8,8 @@ import (
 func CreateIndexes(config *ast.WGConfig) (*WGIndexes, []common.LSPError) {
 errs := make([]common.LSPError, 0)
 indexes := &WGIndexes{
 SectionsByName: make(map[string][]*ast.WGSection),
+UnknownProperties: make(map[uint32]WGIndexPropertyInfo),
 }

 for _, section := range config.Sections {
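The two hunks above only add the UnknownProperties index and initialize it; the analyzer code that fills it is outside this excerpt. As a hypothetical sketch of the recording side (the helper name is an assumption, the types come from the struct above), this is what lets GetPropertyKeywordTypoFixes look up a typo candidate by line later:

```go
package indexes

import "config-lsp/handlers/wireguard/ast"

// recordUnknownProperty is an illustrative helper, not repository code:
// it stores the offending property under its line number so a code action
// request at that line can retrieve it again.
func recordUnknownProperty(
	indexes *WGIndexes,
	section *ast.WGSection,
	property *ast.WGProperty,
) {
	indexes.UnknownProperties[property.Start.Line] = WGIndexPropertyInfo{
		Section:  section,
		Property: property,
	}
}
```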
@@ -15,6 +15,8 @@ func TextDocumentCodeAction(context *glsp.Context, params *protocol.CodeActionPa

 actions = append(actions, handlers.GetKeyGenerationCodeActions(d, params)...)
 actions = append(actions, handlers.GetKeepaliveCodeActions(d, params)...)
+actions = append(actions, handlers.GetAddPeerLikeThis(d, params)...)
+actions = append(actions, handlers.GetPropertyKeywordTypoFixes(d, params)...)

 if len(actions) > 0 {
 return actions, nil
@@ -24,6 +24,12 @@ func WorkspaceExecuteCommand(context *glsp.Context, params *protocol.ExecuteComm

 d := wireguard.DocumentParserMap[args.URI]

+return args.RunCommand(d)
+case string(handlers.CodeActionCreatePeer):
+args := handlers.CodeActionCreatePeerArgsFromArguments(params.Arguments[0].(map[string]any))
+
+d := wireguard.DocumentParserMap[args.URI]
+
 return args.RunCommand(d)
 }

@@ -2,4 +2,4 @@ package roothandler

 // The comment below at the end of the line is required for the CI:CD to work.
 // Do not remove it
-var Version = "0.2.0" // CI:CD-VERSION
+var Version = "0.2.2" // CI:CD-VERSION
@@ -1,21 +0,0 @@
-#!/bin/sh
-
-ROOT=$(git rev-parse --show-toplevel)/server
-
-# aliases
-cd $ROOT/handlers/aliases && antlr4 -Dlanguage=Go -o ast/parser Aliases.g4
-
-# fstab
-cd $ROOT/hanlders/fstab && antlr4 -Dlanguage=Go -o ast/parser Fstab.g4
-
-# sshd_config
-cd $ROOT/handlers/sshd_config && antlr4 -Dlanguage=Go -o ast/parser Config.g4
-cd $ROOT/handlers/sshd_config/match-parser && antlr4 -Dlanguage=Go -o parser Match.g4
-
-# ssh_config
-cd $ROOT/handlers/ssh_config && antlr4 -Dlanguage=Go -o ast/parser Config.g4
-cd $ROOT/handlers/ssh_config/match-parser && antlr4 -Dlanguage=Go -o parser Match.g4
-
-# hosts
-cd $ROOT/handlers/hosts && antlr4 -Dlanguage=Go -o ast/parser Hosts.g4
-
@@ -1,6 +1,7 @@
 package utils

 import (
+"errors"
 "os"
 )

@@ -13,11 +14,19 @@ func DoesPathExist(path string) bool {
 func IsPathDirectory(path string) bool {
 info, err := os.Stat(path)

-return err == nil && info.IsDir()
+if err != nil {
+return false
+}
+
+return info.IsDir()
 }

 func IsPathFile(path string) bool {
-info, err := os.Stat(path)
+_, err := os.Stat(path)

-return err == nil && !info.IsDir()
+if errors.Is(err, os.ErrNotExist) {
+return false
+}
+
+return true
 }

|
|||||||
}
|
}
|
||||||
|
|
||||||
func (t *iPv4Tree) getFromHostBits(hostBits []bool) *context.Context {
|
func (t *iPv4Tree) getFromHostBits(hostBits []bool) *context.Context {
|
||||||
if t.Context != nil || len(hostBits) == 0 {
|
if t.Context != nil {
|
||||||
return &t.Context
|
return &t.Context
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(hostBits) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
if hostBits[0] {
|
if hostBits[0] {
|
||||||
if t.TrueNode == nil {
|
if t.TrueNode == nil {
|
||||||
return nil
|
return nil
|
||||||
|
@@ -31,8 +31,8 @@ func TestPartialHostIpAddresses(t *testing.T) {
 hostSet.AddIP(netip.MustParsePrefix("10.0.0.2/32"), context.Background())
 hostSet.AddIP(netip.MustParsePrefix("10.0.0.3/32"), context.Background())

-if ctx, _ := hostSet.ContainsIP(netip.MustParsePrefix("10.0.0.1/16")); ctx == nil {
-t.Fatalf("Expected to find 10.0.0.1/16 in the host set")
+if ctx, _ := hostSet.ContainsIP(netip.MustParsePrefix("10.0.0.1/16")); ctx != nil {
+t.Fatalf("Didn't expect to find 10.0.0.1/16 in the host set")
 }

 if ctx, _ := hostSet.ContainsIP(netip.MustParsePrefix("192.168.0.1/16")); ctx != nil {
@@ -48,12 +48,24 @@ func TestMixedHostIpAddresses(t *testing.T) {
 hostSet.AddIP(netip.MustParsePrefix("192.168.0.1/32"), context.Background())

 if ctx, _ := hostSet.ContainsIP(netip.MustParsePrefix("10.0.0.2/32")); ctx == nil {
-t.Fatalf("Expected to find 10.0.0.3/32 in the host set")
+t.Fatalf("Expected to find 10.0.0.1/32 in the host set")
 }

 if ctx, _ := hostSet.ContainsIP(netip.MustParsePrefix("192.168.0.2/32")); ctx != nil {
 t.Fatalf("Expected NOT to find 192.168.0.2/32 in the host set")
 }

+if ctx, _ := hostSet.ContainsIP(netip.MustParsePrefix("10.0.0.2/32")); ctx == nil {
+t.Fatalf("Expected to find 10.0.0.2/32 in the host set")
+}
+
+if ctx, _ := hostSet.ContainsIP(netip.MustParsePrefix("10.0.1.2/32")); ctx == nil {
+t.Fatalf("Expected to find 10.0.1.2/32 in the host set")
+}
+
+if ctx, _ := hostSet.ContainsIP(netip.MustParsePrefix("10.0.0.1/30")); ctx == nil {
+t.Fatalf("Expected to find 10.0.0.1/30 in the host set")
+}
 }

 func TestSimpleExactCheck(t *testing.T) {
@@ -6,16 +6,10 @@ and definition requests.

 Install this extension and load your config files in VS Code to get started.

-If `config-lsp` is unable to detect the language of your config file, you can manually
-specify it by adding a line in the form of:
+_config-lsp_ adds the new languages directly to VS Code, so you can use the built-in language selector
+at the bottom right of the window to switch between different config files.

-```plaintext
-#?lsp.language=<language>
-
-# For example
-#?lsp.language=sshconfig
-#?lsp.language=fstab
-#?lsp.language=aliases
-```
+**Please note that this extension is still in beta. There are still bugs and missing features.**
+I'm working on this as a hobby project in my free time, so I can't provide you with a timeline
+as to when a stable version will be released.

@@ -2,7 +2,7 @@
 "name": "config-lsp",
 "description": "Language Features (completions, diagnostics, etc.) for your config files: gitconfig, fstab, aliases, hosts, wireguard, ssh_config, sshd_config, and more to come!",
 "author": "Myzel394",
-"version": "0.2.0",
+"version": "0.2.2",
 "repository": {
 "type": "git",
 "url": "https://github.com/Myzel394/config-lsp"