Compare commits

...

25 Commits

Author SHA1 Message Date
Alex Vinyals 5c66fc770f
Merge a37f70a488 into b01fbb4a22 2025-07-21 21:11:37 +02:00
Alexander Meinhardt Scheurer-Volkmann b01fbb4a22
Fix symlink directories in file explorer (#14028) 2025-07-21 14:10:06 -04:00
MrWheatley f75a26cb9b
added janet indents (#14020) 2025-07-21 14:07:11 -04:00
MrWheatley 21ae1c98fb
fix janet highlights (#14017) 2025-07-21 14:00:21 -04:00
Fea 7b8a4b7a51
feat: Add `kotlin-lsp` to `languages.toml` (#14021) 2025-07-21 14:00:08 -04:00
Yorick Peterse 715d4ae2d5
tree-sitter: update Inko grammar and queries (#14022) 2025-07-21 13:51:50 -04:00
Ivan Shymkiv 22b184b570
Fixed theme location (#14016) 2025-07-19 17:33:47 -05:00
Ivan Shymkiv 665ee4da22
feat(theme): add Gruvbox Material Dark theme variants (#14005) 2025-07-19 15:45:15 -05:00
Ivan Shymkiv ecd18e3eb2
feat(themes): add Gruvbox Material Light theme (#14007) 2025-07-19 15:44:42 -05:00
Poliorcetics e7f95ca6b2
just: bump grammar support to handle module path in aliases and recipes dependencies (#14009) 2025-07-19 15:18:18 -04:00
Michael Davis 4418e338e8
Use syntax symbol pickers for Erlang
Neither language server robustly supports workspace symbol search.
`erlang-ls`'s symbol picker takes a long time to open successfully on
boot. `elp`'s is faster but not faster than the tags query.
2025-07-18 11:17:10 -04:00
Michael Davis 6c71fc00b2
Document tags.scm queries, commands and language support 2025-07-18 11:17:10 -04:00
Michael Davis 727758e068
Add syntax symbol pickers based on tags.scm queries 2025-07-18 11:16:42 -04:00
Michael Davis 63eb1b870c
Add initial tags.scm queries
Co-authored-by: cgahr <26804763+cgahr@users.noreply.github.com>
Co-authored-by: eh <correia.eh@gmail.com>
2025-07-18 11:12:41 -04:00
Michael Davis 2d5826d194
Complete words from open buffers (#13206) 2025-07-18 09:51:00 -05:00
Michael Davis 9f4ef2fc3d
Add release notes for 25.07.1
(cherry picked from commit a05c151bb6)
2025-07-18 10:39:27 -04:00
RoloEdits fd8aacc1a4
build: lower ubuntu version from `24.04` to `22.04` (#13983) 2025-07-18 09:16:21 -05:00
Alex Vinyals a37f70a488 bug: tab to str conversion should check the length of the tab in the palette 2024-10-14 09:11:16 +02:00
Alex Vinyals ab57115567 Merge branch 'master' into issue-2719 2024-10-05 22:48:54 +02:00
Alexandre Vinyals Valdepeñas 970122f6d2 introduce custom style for trailing whitespace 2024-04-20 12:21:57 +02:00
Alexandre Vinyals Valdepeñas a86a7d6920 Small enum, faster to pass by value than borrowing it. 2024-04-14 08:23:36 +02:00
Alexandre Vinyals Valdepeñas ba893751c2 Merge branch 'master' into issue-2719
Helix is handling narrow non-breaking spaces, update
trailing whitespace tracker to take them into account
2024-04-14 00:06:10 +02:00
Alexandre Vinyals Valdepeñas de13260bb6 feedback: stop allocating, pass render callback instead, ignore newline 2023-07-06 22:49:27 +02:00
Alexandre Vinyals Valdepeñas 99aa751c75 feedback: apply as much feedback as possible (round 1) 2023-06-24 13:36:55 +02:00
Alexandre Vinyals Valdepeñas 60c06076b2 feat(editor): add support to highlight trailing whitespace
Adds a new render configuration value `Trailing`, which can be used
to selectively enable trailing whitespace of certain whitespace characters.
2023-06-04 22:48:11 +02:00
60 changed files with 2597 additions and 494 deletions

View File

@ -61,12 +61,15 @@ jobs:
build: [x86_64-linux, aarch64-linux, x86_64-macos, x86_64-windows] #, x86_64-win-gnu, win32-msvc
include:
- build: x86_64-linux
os: ubuntu-24.04
# WARN: When changing this to a newer version, make sure that the GLIBC isn't too new, as this can cause issues
# with portability on older systems that don't follow Ubuntu's more rapid release cadence.
os: ubuntu-22.04
rust: stable
target: x86_64-unknown-linux-gnu
cross: false
- build: aarch64-linux
os: ubuntu-24.04-arm
# Version should be kept in lockstep with the x86_64 version
os: ubuntu-22.04-arm
rust: stable
target: aarch64-unknown-linux-gnu
cross: false
@ -291,7 +294,7 @@ jobs:
file_glob: true
tag: ${{ github.ref_name }}
overwrite: true
- name: Upload binaries as artifact
uses: actions/upload-artifact@v4
if: env.preview == 'true'

View File

@ -20,6 +20,10 @@ Updated languages and queries:
Packaging:
-->
# 25.07.1 (2025-07-18)
This is a patch release which lowers the GLIBC requirements of the release artifacts published to GitHub ([#13983](https://github.com/helix-editor/helix/pull/13983))
# 25.07 (2025-07-15)
As always, a big thank you to all of the contributors! This release saw changes from 195 contributors.

26
Cargo.lock generated
View File

@ -1397,7 +1397,7 @@ dependencies = [
[[package]]
name = "helix-core"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"anyhow",
"arc-swap",
@ -1435,7 +1435,7 @@ dependencies = [
[[package]]
name = "helix-dap"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"anyhow",
"fern",
@ -1454,7 +1454,7 @@ dependencies = [
[[package]]
name = "helix-event"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"anyhow",
"foldhash",
@ -1468,7 +1468,7 @@ dependencies = [
[[package]]
name = "helix-loader"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"anyhow",
"cc",
@ -1485,7 +1485,7 @@ dependencies = [
[[package]]
name = "helix-lsp"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"anyhow",
"arc-swap",
@ -1518,11 +1518,11 @@ dependencies = [
[[package]]
name = "helix-parsec"
version = "25.7.0"
version = "25.7.1"
[[package]]
name = "helix-stdx"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"bitflags",
"dunce",
@ -1540,13 +1540,14 @@ dependencies = [
[[package]]
name = "helix-term"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"anyhow",
"arc-swap",
"chrono",
"content_inspector",
"crossterm",
"dashmap",
"fern",
"futures-util",
"grep-regex",
@ -1586,7 +1587,7 @@ dependencies = [
[[package]]
name = "helix-tui"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"bitflags",
"cassowary",
@ -1601,7 +1602,7 @@ dependencies = [
[[package]]
name = "helix-vcs"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"anyhow",
"arc-swap",
@ -1617,7 +1618,7 @@ dependencies = [
[[package]]
name = "helix-view"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"anyhow",
"arc-swap",
@ -1634,6 +1635,7 @@ dependencies = [
"helix-stdx",
"helix-tui",
"helix-vcs",
"kstring",
"libc",
"log",
"once_cell",
@ -3279,7 +3281,7 @@ checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
[[package]]
name = "xtask"
version = "25.7.0"
version = "25.7.1"
dependencies = [
"helix-core",
"helix-loader",

View File

@ -53,7 +53,7 @@ tokio-stream = "0.1.17"
toml = "0.9"
[workspace.package]
version = "25.7.0"
version = "25.7.1"
edition = "2021"
authors = ["Blaž Hrastnik <blaz@mxxn.io>"]
categories = ["editor"]

View File

@ -28,3 +28,4 @@
- [Adding textobject queries](./guides/textobject.md)
- [Adding indent queries](./guides/indent.md)
- [Adding injection queries](./guides/injection.md)
- [Adding tags queries](./guides/tags.md)

View File

@ -19,6 +19,7 @@
- [`[editor.soft-wrap]` Section](#editorsoft-wrap-section)
- [`[editor.smart-tab]` Section](#editorsmart-tab-section)
- [`[editor.inline-diagnostics]` Section](#editorinline-diagnostics-section)
- [`[editor.word-completion]` Section](#editorword-completion-section)
### `[editor]` Section
@ -477,3 +478,21 @@ end-of-line-diagnostics = "hint"
[editor.inline-diagnostics]
cursor-line = "warning" # show warnings and errors on the cursorline inline
```
### `[editor.word-completion]` Section
Options for controlling completion of words from open buffers.
| Key | Description | Default |
| --- | --- | --- |
| `enable` | Whether word completion is enabled | `true` |
| `trigger-length` | Number of word characters to type before triggering completion | `7` |
Example:
```toml
[editor.word-completion]
enable = true
# Set the trigger length lower so that words are completed more often
trigger-length = 4
```

View File

@ -1,282 +1,282 @@
| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Default language servers |
| --- | --- | --- | --- | --- |
| ada | ✓ | ✓ | | `ada_language_server` |
| adl | ✓ | ✓ | ✓ | |
| agda | ✓ | | | |
| alloy | ✓ | | | |
| amber | ✓ | | | `amber-lsp` |
| astro | ✓ | | | `astro-ls` |
| awk | ✓ | ✓ | | `awk-language-server` |
| bash | ✓ | ✓ | ✓ | `bash-language-server` |
| bass | ✓ | | | `bass` |
| beancount | ✓ | | | `beancount-language-server` |
| bibtex | ✓ | | | `texlab` |
| bicep | ✓ | | | `bicep-langserver` |
| bitbake | ✓ | | | `bitbake-language-server` |
| blade | ✓ | | | |
| blueprint | ✓ | | | `blueprint-compiler` |
| c | ✓ | ✓ | ✓ | `clangd` |
| c-sharp | ✓ | ✓ | | `OmniSharp` |
| cabal | | | | `haskell-language-server-wrapper` |
| caddyfile | ✓ | ✓ | ✓ | |
| cairo | ✓ | ✓ | ✓ | `cairo-language-server` |
| capnp | ✓ | | ✓ | |
| cel | ✓ | | | |
| circom | ✓ | | | `circom-lsp` |
| clarity | ✓ | | | `clarinet` |
| clojure | ✓ | | | `clojure-lsp` |
| cmake | ✓ | ✓ | ✓ | `neocmakelsp`, `cmake-language-server` |
| codeql | ✓ | ✓ | | `codeql` |
| comment | ✓ | | | |
| common-lisp | ✓ | | ✓ | `cl-lsp` |
| cpon | ✓ | | ✓ | |
| cpp | ✓ | ✓ | ✓ | `clangd` |
| crystal | ✓ | ✓ | ✓ | `crystalline`, `ameba-ls` |
| css | ✓ | | ✓ | `vscode-css-language-server` |
| csv | ✓ | | | |
| cue | ✓ | | | `cuelsp` |
| cylc | ✓ | ✓ | ✓ | |
| d | ✓ | ✓ | ✓ | `serve-d` |
| dart | ✓ | ✓ | ✓ | `dart` |
| dbml | ✓ | | | |
| debian | ✓ | | | |
| devicetree | ✓ | | | `dts-lsp` |
| dhall | ✓ | ✓ | | `dhall-lsp-server` |
| diff | ✓ | | | |
| djot | ✓ | | | |
| docker-compose | ✓ | ✓ | ✓ | `docker-compose-langserver`, `yaml-language-server` |
| dockerfile | ✓ | ✓ | | `docker-langserver` |
| dot | ✓ | | | `dot-language-server` |
| dtd | ✓ | | | |
| dune | ✓ | | | |
| dunstrc | ✓ | | | |
| earthfile | ✓ | ✓ | ✓ | `earthlyls` |
| edoc | ✓ | | | |
| eex | ✓ | | | |
| ejs | ✓ | | | |
| elisp | ✓ | | | |
| elixir | ✓ | ✓ | ✓ | `elixir-ls` |
| elm | ✓ | ✓ | | `elm-language-server` |
| elvish | ✓ | | | `elvish` |
| env | ✓ | ✓ | | |
| erb | ✓ | | | |
| erlang | ✓ | ✓ | | `erlang_ls`, `elp` |
| esdl | ✓ | | | |
| fennel | ✓ | | | `fennel-ls` |
| fga | ✓ | ✓ | ✓ | |
| fidl | ✓ | | | |
| fish | ✓ | ✓ | ✓ | `fish-lsp` |
| forth | ✓ | | | `forth-lsp` |
| fortran | ✓ | | ✓ | `fortls` |
| fsharp | ✓ | | | `fsautocomplete` |
| gas | ✓ | ✓ | | `asm-lsp` |
| gdscript | ✓ | ✓ | ✓ | |
| gemini | ✓ | | | |
| gherkin | ✓ | | | |
| ghostty | ✓ | | | |
| git-attributes | ✓ | | | |
| git-commit | ✓ | ✓ | | |
| git-config | ✓ | ✓ | | |
| git-ignore | ✓ | | | |
| git-notes | ✓ | | | |
| git-rebase | ✓ | | | |
| gjs | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
| gleam | ✓ | ✓ | | `gleam` |
| glimmer | ✓ | | | `ember-language-server` |
| glsl | ✓ | ✓ | ✓ | `glsl_analyzer` |
| gn | ✓ | | | |
| go | ✓ | ✓ | ✓ | `gopls`, `golangci-lint-langserver` |
| godot-resource | ✓ | ✓ | | |
| gomod | ✓ | | | `gopls` |
| gotmpl | ✓ | | | `gopls` |
| gowork | ✓ | | | `gopls` |
| gpr | ✓ | | | `ada_language_server` |
| graphql | ✓ | ✓ | | `graphql-lsp` |
| gren | ✓ | ✓ | | |
| groovy | ✓ | | | |
| gts | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
| hare | ✓ | | | |
| haskell | ✓ | ✓ | | `haskell-language-server-wrapper` |
| haskell-persistent | ✓ | | | |
| hcl | ✓ | ✓ | ✓ | `terraform-ls` |
| heex | ✓ | ✓ | | `elixir-ls` |
| helm | ✓ | | | `helm_ls` |
| hocon | ✓ | ✓ | ✓ | |
| hoon | ✓ | | | |
| hosts | ✓ | | | |
| html | ✓ | ✓ | | `vscode-html-language-server`, `superhtml` |
| htmldjango | ✓ | | | `djlsp`, `vscode-html-language-server`, `superhtml` |
| hurl | ✓ | ✓ | ✓ | |
| hyprlang | ✓ | | ✓ | `hyprls` |
| idris | | | | `idris2-lsp` |
| iex | ✓ | | | |
| ini | ✓ | | | |
| ink | ✓ | | | |
| inko | ✓ | ✓ | ✓ | |
| janet | ✓ | | | |
| java | ✓ | ✓ | ✓ | `jdtls` |
| javascript | ✓ | ✓ | ✓ | `typescript-language-server` |
| jinja | ✓ | | | |
| jjconfig | ✓ | ✓ | ✓ | `taplo`, `tombi` |
| jjdescription | ✓ | | | |
| jjrevset | ✓ | | | |
| jjtemplate | ✓ | | | |
| jq | ✓ | ✓ | | `jq-lsp` |
| jsdoc | ✓ | | | |
| json | ✓ | ✓ | ✓ | `vscode-json-language-server` |
| json-ld | ✓ | ✓ | ✓ | `vscode-json-language-server` |
| json5 | ✓ | | | |
| jsonc | ✓ | | ✓ | `vscode-json-language-server` |
| jsonnet | ✓ | | | `jsonnet-language-server` |
| jsx | ✓ | ✓ | ✓ | `typescript-language-server` |
| julia | ✓ | ✓ | ✓ | `julia` |
| just | ✓ | ✓ | ✓ | `just-lsp` |
| kdl | ✓ | ✓ | ✓ | |
| koka | ✓ | | ✓ | `koka` |
| kotlin | ✓ | ✓ | ✓ | `kotlin-language-server` |
| koto | ✓ | ✓ | ✓ | `koto-ls` |
| latex | ✓ | ✓ | | `texlab` |
| ld | ✓ | | ✓ | |
| ldif | ✓ | | | |
| lean | ✓ | | | `lean` |
| ledger | ✓ | | | |
| llvm | ✓ | ✓ | ✓ | |
| llvm-mir | ✓ | ✓ | ✓ | |
| llvm-mir-yaml | ✓ | | ✓ | |
| log | ✓ | | | |
| lpf | ✓ | | | |
| lua | ✓ | ✓ | ✓ | `lua-language-server` |
| luau | ✓ | ✓ | ✓ | `luau-lsp` |
| mail | ✓ | ✓ | | |
| make | ✓ | | ✓ | |
| markdoc | ✓ | | | `markdoc-ls` |
| markdown | ✓ | | | `marksman`, `markdown-oxide` |
| markdown-rustdoc | ✓ | | | |
| markdown.inline | ✓ | | | |
| matlab | ✓ | ✓ | ✓ | |
| mermaid | ✓ | | | |
| meson | ✓ | | ✓ | `mesonlsp` |
| mint | | | | `mint` |
| mojo | ✓ | ✓ | ✓ | `pixi` |
| move | ✓ | | | |
| msbuild | ✓ | | ✓ | |
| nasm | ✓ | ✓ | | `asm-lsp` |
| nestedtext | ✓ | ✓ | ✓ | |
| nginx | ✓ | | | |
| nickel | ✓ | | ✓ | `nls` |
| nim | ✓ | ✓ | ✓ | `nimlangserver` |
| nix | ✓ | ✓ | ✓ | `nil`, `nixd` |
| nu | ✓ | | | `nu` |
| nunjucks | ✓ | | | |
| ocaml | ✓ | | ✓ | `ocamllsp` |
| ocaml-interface | ✓ | | | `ocamllsp` |
| odin | ✓ | ✓ | ✓ | `ols` |
| ohm | ✓ | ✓ | ✓ | |
| opencl | ✓ | ✓ | ✓ | `clangd` |
| openscad | ✓ | | | `openscad-lsp` |
| org | ✓ | | | |
| pascal | ✓ | ✓ | | `pasls` |
| passwd | ✓ | | | |
| pem | ✓ | | | |
| perl | ✓ | ✓ | ✓ | `perlnavigator` |
| pest | ✓ | ✓ | ✓ | `pest-language-server` |
| php | ✓ | ✓ | ✓ | `intelephense` |
| php-only | ✓ | | | |
| pkgbuild | ✓ | ✓ | ✓ | `termux-language-server`, `bash-language-server` |
| pkl | ✓ | | ✓ | `pkl-lsp` |
| po | ✓ | ✓ | | |
| pod | ✓ | | | |
| ponylang | ✓ | ✓ | ✓ | |
| powershell | ✓ | | | |
| prisma | ✓ | ✓ | | `prisma-language-server` |
| prolog | ✓ | | ✓ | `swipl` |
| properties | ✓ | ✓ | | |
| protobuf | ✓ | ✓ | ✓ | `buf`, `pb`, `protols` |
| prql | ✓ | | | |
| pug | ✓ | | | |
| purescript | ✓ | ✓ | | `purescript-language-server` |
| python | ✓ | ✓ | ✓ | `ty`, `ruff`, `jedi-language-server`, `pylsp` |
| qml | ✓ | ✓ | ✓ | `qmlls` |
| quarto | ✓ | | ✓ | |
| quint | ✓ | | | `quint-language-server` |
| r | ✓ | | | `R` |
| racket | ✓ | | ✓ | `racket` |
| regex | ✓ | | | |
| rego | ✓ | | | `regols` |
| rescript | ✓ | ✓ | | `rescript-language-server` |
| rmarkdown | ✓ | | ✓ | `R` |
| robot | ✓ | | | `robotframework_ls` |
| ron | ✓ | | ✓ | |
| rst | ✓ | | | |
| ruby | ✓ | ✓ | ✓ | `ruby-lsp`, `solargraph` |
| rust | ✓ | ✓ | ✓ | `rust-analyzer` |
| rust-format-args | ✓ | | | |
| rust-format-args-macro | ✓ | ✓ | ✓ | |
| sage | ✓ | ✓ | | |
| scala | ✓ | ✓ | ✓ | `metals` |
| scheme | ✓ | | ✓ | |
| scss | ✓ | | | `vscode-css-language-server` |
| slang | ✓ | ✓ | ✓ | `slangd` |
| slint | ✓ | ✓ | ✓ | `slint-lsp` |
| smali | ✓ | | ✓ | |
| smithy | ✓ | | | `cs` |
| sml | ✓ | | | |
| snakemake | ✓ | | ✓ | `pylsp` |
| solidity | ✓ | ✓ | | `solc` |
| sourcepawn | ✓ | ✓ | | `sourcepawn-studio` |
| spade | ✓ | | ✓ | `spade-language-server` |
| spicedb | ✓ | | | |
| sql | ✓ | ✓ | | |
| sshclientconfig | ✓ | | | |
| starlark | ✓ | ✓ | ✓ | `starpls` |
| strace | ✓ | | | |
| supercollider | ✓ | | | |
| svelte | ✓ | | ✓ | `svelteserver` |
| sway | ✓ | ✓ | ✓ | `forc` |
| swift | ✓ | ✓ | | `sourcekit-lsp` |
| systemd | ✓ | | | `systemd-lsp` |
| t32 | ✓ | | | |
| tablegen | ✓ | ✓ | ✓ | |
| tact | ✓ | ✓ | ✓ | |
| task | ✓ | | | |
| tcl | ✓ | | ✓ | |
| teal | ✓ | | | `teal-language-server` |
| templ | ✓ | | | `templ` |
| tera | ✓ | | | |
| textproto | ✓ | ✓ | ✓ | |
| tfvars | ✓ | | ✓ | `terraform-ls` |
| thrift | ✓ | | | |
| tlaplus | ✓ | | | |
| todotxt | ✓ | | | |
| toml | ✓ | ✓ | | `taplo`, `tombi` |
| tsq | ✓ | | | `ts_query_ls` |
| tsx | ✓ | ✓ | ✓ | `typescript-language-server` |
| twig | ✓ | | | |
| typescript | ✓ | ✓ | ✓ | `typescript-language-server` |
| typespec | ✓ | ✓ | ✓ | `tsp-server` |
| typst | ✓ | | | `tinymist` |
| ungrammar | ✓ | | | |
| unison | ✓ | ✓ | ✓ | |
| uxntal | ✓ | | | |
| v | ✓ | ✓ | ✓ | `v-analyzer` |
| vala | ✓ | ✓ | | `vala-language-server` |
| vento | ✓ | | | |
| verilog | ✓ | ✓ | | `svlangserver` |
| vhdl | ✓ | | | `vhdl_ls` |
| vhs | ✓ | | | |
| vue | ✓ | | | `vue-language-server` |
| wast | ✓ | | | |
| wat | ✓ | | | `wat_server` |
| webc | ✓ | | | |
| werk | ✓ | | | |
| wesl | ✓ | ✓ | | |
| wgsl | ✓ | | | `wgsl-analyzer` |
| wit | ✓ | | ✓ | |
| wren | ✓ | ✓ | ✓ | |
| xit | ✓ | | | |
| xml | ✓ | ✓ | ✓ | |
| xtc | ✓ | | | |
| yaml | ✓ | ✓ | ✓ | `yaml-language-server`, `ansible-language-server` |
| yara | ✓ | | | `yls` |
| yuck | ✓ | | | |
| zig | ✓ | ✓ | ✓ | `zls` |
| Language | Syntax Highlighting | Treesitter Textobjects | Auto Indent | Code Navigation Tags | Default language servers |
| --- | --- | --- | --- | --- | --- |
| ada | ✓ | ✓ | | | `ada_language_server` |
| adl | ✓ | ✓ | ✓ | | |
| agda | ✓ | | | | |
| alloy | ✓ | | | | |
| amber | ✓ | | | | `amber-lsp` |
| astro | ✓ | | | | `astro-ls` |
| awk | ✓ | ✓ | | | `awk-language-server` |
| bash | ✓ | ✓ | ✓ | | `bash-language-server` |
| bass | ✓ | | | | `bass` |
| beancount | ✓ | | | | `beancount-language-server` |
| bibtex | ✓ | | | | `texlab` |
| bicep | ✓ | | | | `bicep-langserver` |
| bitbake | ✓ | | | | `bitbake-language-server` |
| blade | ✓ | | | | |
| blueprint | ✓ | | | | `blueprint-compiler` |
| c | ✓ | ✓ | ✓ | ✓ | `clangd` |
| c-sharp | ✓ | ✓ | | ✓ | `OmniSharp` |
| cabal | | | | | `haskell-language-server-wrapper` |
| caddyfile | ✓ | ✓ | ✓ | | |
| cairo | ✓ | ✓ | ✓ | | `cairo-language-server` |
| capnp | ✓ | | ✓ | | |
| cel | ✓ | | | | |
| circom | ✓ | | | | `circom-lsp` |
| clarity | ✓ | | | | `clarinet` |
| clojure | ✓ | | | | `clojure-lsp` |
| cmake | ✓ | ✓ | ✓ | | `neocmakelsp`, `cmake-language-server` |
| codeql | ✓ | ✓ | | | `codeql` |
| comment | ✓ | | | | |
| common-lisp | ✓ | | ✓ | | `cl-lsp` |
| cpon | ✓ | | ✓ | | |
| cpp | ✓ | ✓ | ✓ | ✓ | `clangd` |
| crystal | ✓ | ✓ | ✓ | ✓ | `crystalline`, `ameba-ls` |
| css | ✓ | | ✓ | | `vscode-css-language-server` |
| csv | ✓ | | | | |
| cue | ✓ | | | | `cuelsp` |
| cylc | ✓ | ✓ | ✓ | | |
| d | ✓ | ✓ | ✓ | | `serve-d` |
| dart | ✓ | ✓ | ✓ | | `dart` |
| dbml | ✓ | | | | |
| debian | ✓ | | | | |
| devicetree | ✓ | | | | `dts-lsp` |
| dhall | ✓ | ✓ | | | `dhall-lsp-server` |
| diff | ✓ | | | | |
| djot | ✓ | | | | |
| docker-compose | ✓ | ✓ | ✓ | | `docker-compose-langserver`, `yaml-language-server` |
| dockerfile | ✓ | ✓ | | | `docker-langserver` |
| dot | ✓ | | | | `dot-language-server` |
| dtd | ✓ | | | | |
| dune | ✓ | | | | |
| dunstrc | ✓ | | | | |
| earthfile | ✓ | ✓ | ✓ | | `earthlyls` |
| edoc | ✓ | | | | |
| eex | ✓ | | | | |
| ejs | ✓ | | | | |
| elisp | ✓ | | | ✓ | |
| elixir | ✓ | ✓ | ✓ | ✓ | `elixir-ls` |
| elm | ✓ | ✓ | | ✓ | `elm-language-server` |
| elvish | ✓ | | | | `elvish` |
| env | ✓ | ✓ | | | |
| erb | ✓ | | | | |
| erlang | ✓ | ✓ | | ✓ | `erlang_ls`, `elp` |
| esdl | ✓ | | | | |
| fennel | ✓ | | | | `fennel-ls` |
| fga | ✓ | ✓ | ✓ | | |
| fidl | ✓ | | | | |
| fish | ✓ | ✓ | ✓ | | `fish-lsp` |
| forth | ✓ | | | | `forth-lsp` |
| fortran | ✓ | | ✓ | | `fortls` |
| fsharp | ✓ | | | | `fsautocomplete` |
| gas | ✓ | ✓ | | | `asm-lsp` |
| gdscript | ✓ | ✓ | ✓ | ✓ | |
| gemini | ✓ | | | | |
| gherkin | ✓ | | | | |
| ghostty | ✓ | | | | |
| git-attributes | ✓ | | | | |
| git-commit | ✓ | ✓ | | | |
| git-config | ✓ | ✓ | | | |
| git-ignore | ✓ | | | | |
| git-notes | ✓ | | | | |
| git-rebase | ✓ | | | | |
| gjs | ✓ | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
| gleam | ✓ | ✓ | | | `gleam` |
| glimmer | ✓ | | | | `ember-language-server` |
| glsl | ✓ | ✓ | ✓ | | `glsl_analyzer` |
| gn | ✓ | | | | |
| go | ✓ | ✓ | ✓ | ✓ | `gopls`, `golangci-lint-langserver` |
| godot-resource | ✓ | ✓ | | | |
| gomod | ✓ | | | | `gopls` |
| gotmpl | ✓ | | | | `gopls` |
| gowork | ✓ | | | | `gopls` |
| gpr | ✓ | | | | `ada_language_server` |
| graphql | ✓ | ✓ | | | `graphql-lsp` |
| gren | ✓ | ✓ | | | |
| groovy | ✓ | | | | |
| gts | ✓ | ✓ | ✓ | ✓ | `typescript-language-server`, `vscode-eslint-language-server`, `ember-language-server` |
| hare | ✓ | | | | |
| haskell | ✓ | ✓ | | | `haskell-language-server-wrapper` |
| haskell-persistent | ✓ | | | | |
| hcl | ✓ | ✓ | ✓ | | `terraform-ls` |
| heex | ✓ | ✓ | | | `elixir-ls` |
| helm | ✓ | | | | `helm_ls` |
| hocon | ✓ | ✓ | ✓ | | |
| hoon | ✓ | | | | |
| hosts | ✓ | | | | |
| html | ✓ | ✓ | | | `vscode-html-language-server`, `superhtml` |
| htmldjango | ✓ | | | | `djlsp`, `vscode-html-language-server`, `superhtml` |
| hurl | ✓ | ✓ | ✓ | | |
| hyprlang | ✓ | | ✓ | | `hyprls` |
| idris | | | | | `idris2-lsp` |
| iex | ✓ | | | | |
| ini | ✓ | | | | |
| ink | ✓ | | | | |
| inko | ✓ | ✓ | ✓ | ✓ | |
| janet | ✓ | | ✓ | | |
| java | ✓ | ✓ | ✓ | | `jdtls` |
| javascript | ✓ | ✓ | ✓ | ✓ | `typescript-language-server` |
| jinja | ✓ | | | | |
| jjconfig | ✓ | ✓ | ✓ | | `taplo`, `tombi` |
| jjdescription | ✓ | | | | |
| jjrevset | ✓ | | | | |
| jjtemplate | ✓ | | | | |
| jq | ✓ | ✓ | | | `jq-lsp` |
| jsdoc | ✓ | | | | |
| json | ✓ | ✓ | ✓ | | `vscode-json-language-server` |
| json-ld | ✓ | ✓ | ✓ | | `vscode-json-language-server` |
| json5 | ✓ | | | | |
| jsonc | ✓ | | ✓ | | `vscode-json-language-server` |
| jsonnet | ✓ | | | | `jsonnet-language-server` |
| jsx | ✓ | ✓ | ✓ | ✓ | `typescript-language-server` |
| julia | ✓ | ✓ | ✓ | | `julia` |
| just | ✓ | ✓ | ✓ | | `just-lsp` |
| kdl | ✓ | ✓ | ✓ | | |
| koka | ✓ | | ✓ | | `koka` |
| kotlin | ✓ | ✓ | ✓ | | `kotlin-language-server` |
| koto | ✓ | ✓ | ✓ | | `koto-ls` |
| latex | ✓ | ✓ | | | `texlab` |
| ld | ✓ | | ✓ | | |
| ldif | ✓ | | | | |
| lean | ✓ | | | | `lean` |
| ledger | ✓ | | | | |
| llvm | ✓ | ✓ | ✓ | | |
| llvm-mir | ✓ | ✓ | ✓ | | |
| llvm-mir-yaml | ✓ | | ✓ | | |
| log | ✓ | | | | |
| lpf | ✓ | | | | |
| lua | ✓ | ✓ | ✓ | | `lua-language-server` |
| luau | ✓ | ✓ | ✓ | | `luau-lsp` |
| mail | ✓ | ✓ | | | |
| make | ✓ | | ✓ | | |
| markdoc | ✓ | | | | `markdoc-ls` |
| markdown | ✓ | | | ✓ | `marksman`, `markdown-oxide` |
| markdown-rustdoc | ✓ | | | | |
| markdown.inline | ✓ | | | | |
| matlab | ✓ | ✓ | ✓ | | |
| mermaid | ✓ | | | | |
| meson | ✓ | | ✓ | | `mesonlsp` |
| mint | | | | | `mint` |
| mojo | ✓ | ✓ | ✓ | | `pixi` |
| move | ✓ | | | | |
| msbuild | ✓ | | ✓ | | |
| nasm | ✓ | ✓ | | | `asm-lsp` |
| nestedtext | ✓ | ✓ | ✓ | | |
| nginx | ✓ | | | | |
| nickel | ✓ | | ✓ | | `nls` |
| nim | ✓ | ✓ | ✓ | | `nimlangserver` |
| nix | ✓ | ✓ | ✓ | | `nil`, `nixd` |
| nu | ✓ | | | | `nu` |
| nunjucks | ✓ | | | | |
| ocaml | ✓ | | ✓ | | `ocamllsp` |
| ocaml-interface | ✓ | | | | `ocamllsp` |
| odin | ✓ | ✓ | ✓ | | `ols` |
| ohm | ✓ | ✓ | ✓ | | |
| opencl | ✓ | ✓ | ✓ | | `clangd` |
| openscad | ✓ | | | | `openscad-lsp` |
| org | ✓ | | | | |
| pascal | ✓ | ✓ | | | `pasls` |
| passwd | ✓ | | | | |
| pem | ✓ | | | | |
| perl | ✓ | ✓ | ✓ | | `perlnavigator` |
| pest | ✓ | ✓ | ✓ | | `pest-language-server` |
| php | ✓ | ✓ | ✓ | ✓ | `intelephense` |
| php-only | ✓ | | | ✓ | |
| pkgbuild | ✓ | ✓ | ✓ | | `termux-language-server`, `bash-language-server` |
| pkl | ✓ | | ✓ | | `pkl-lsp` |
| po | ✓ | ✓ | | | |
| pod | ✓ | | | | |
| ponylang | ✓ | ✓ | ✓ | | |
| powershell | ✓ | | | | |
| prisma | ✓ | ✓ | | | `prisma-language-server` |
| prolog | ✓ | | ✓ | | `swipl` |
| properties | ✓ | ✓ | | | |
| protobuf | ✓ | ✓ | ✓ | | `buf`, `pb`, `protols` |
| prql | ✓ | | | | |
| pug | ✓ | | | | |
| purescript | ✓ | ✓ | | | `purescript-language-server` |
| python | ✓ | ✓ | ✓ | ✓ | `ty`, `ruff`, `jedi-language-server`, `pylsp` |
| qml | ✓ | ✓ | ✓ | | `qmlls` |
| quarto | ✓ | | ✓ | | |
| quint | ✓ | | | | `quint-language-server` |
| r | ✓ | | | | `R` |
| racket | ✓ | | ✓ | | `racket` |
| regex | ✓ | | | | |
| rego | ✓ | | | | `regols` |
| rescript | ✓ | ✓ | | | `rescript-language-server` |
| rmarkdown | ✓ | | ✓ | | `R` |
| robot | ✓ | | | | `robotframework_ls` |
| ron | ✓ | | ✓ | | |
| rst | ✓ | | | | |
| ruby | ✓ | ✓ | ✓ | ✓ | `ruby-lsp`, `solargraph` |
| rust | ✓ | ✓ | ✓ | ✓ | `rust-analyzer` |
| rust-format-args | ✓ | | | | |
| rust-format-args-macro | ✓ | ✓ | ✓ | | |
| sage | ✓ | ✓ | | | |
| scala | ✓ | ✓ | ✓ | | `metals` |
| scheme | ✓ | | ✓ | | |
| scss | ✓ | | | | `vscode-css-language-server` |
| slang | ✓ | ✓ | ✓ | | `slangd` |
| slint | ✓ | ✓ | ✓ | | `slint-lsp` |
| smali | ✓ | | ✓ | | |
| smithy | ✓ | | | | `cs` |
| sml | ✓ | | | | |
| snakemake | ✓ | | ✓ | | `pylsp` |
| solidity | ✓ | ✓ | | | `solc` |
| sourcepawn | ✓ | ✓ | | | `sourcepawn-studio` |
| spade | ✓ | | ✓ | | `spade-language-server` |
| spicedb | ✓ | | | ✓ | |
| sql | ✓ | ✓ | | | |
| sshclientconfig | ✓ | | | | |
| starlark | ✓ | ✓ | ✓ | | `starpls` |
| strace | ✓ | | | | |
| supercollider | ✓ | | | | |
| svelte | ✓ | | ✓ | | `svelteserver` |
| sway | ✓ | ✓ | ✓ | | `forc` |
| swift | ✓ | ✓ | | | `sourcekit-lsp` |
| systemd | ✓ | | | | `systemd-lsp` |
| t32 | ✓ | | | | |
| tablegen | ✓ | ✓ | ✓ | | |
| tact | ✓ | ✓ | ✓ | | |
| task | ✓ | | | | |
| tcl | ✓ | | ✓ | | |
| teal | ✓ | | | | `teal-language-server` |
| templ | ✓ | | | | `templ` |
| tera | ✓ | | | | |
| textproto | ✓ | ✓ | ✓ | | |
| tfvars | ✓ | | ✓ | | `terraform-ls` |
| thrift | ✓ | | | | |
| tlaplus | ✓ | | | | |
| todotxt | ✓ | | | | |
| toml | ✓ | ✓ | | | `taplo`, `tombi` |
| tsq | ✓ | | | | `ts_query_ls` |
| tsx | ✓ | ✓ | ✓ | ✓ | `typescript-language-server` |
| twig | ✓ | | | | |
| typescript | ✓ | ✓ | ✓ | ✓ | `typescript-language-server` |
| typespec | ✓ | ✓ | ✓ | | `tsp-server` |
| typst | ✓ | | | ✓ | `tinymist` |
| ungrammar | ✓ | | | | |
| unison | ✓ | ✓ | ✓ | | |
| uxntal | ✓ | | | | |
| v | ✓ | ✓ | ✓ | | `v-analyzer` |
| vala | ✓ | ✓ | | | `vala-language-server` |
| vento | ✓ | | | | |
| verilog | ✓ | ✓ | | | `svlangserver` |
| vhdl | ✓ | | | | `vhdl_ls` |
| vhs | ✓ | | | | |
| vue | ✓ | | | | `vue-language-server` |
| wast | ✓ | | | | |
| wat | ✓ | | | | `wat_server` |
| webc | ✓ | | | | |
| werk | ✓ | | | | |
| wesl | ✓ | ✓ | | | |
| wgsl | ✓ | | | | `wgsl-analyzer` |
| wit | ✓ | | ✓ | | |
| wren | ✓ | ✓ | ✓ | | |
| xit | ✓ | | | | |
| xml | ✓ | ✓ | ✓ | | |
| xtc | ✓ | | | | |
| yaml | ✓ | ✓ | ✓ | | `yaml-language-server`, `ansible-language-server` |
| yara | ✓ | | | | `yls` |
| yuck | ✓ | | | | |
| zig | ✓ | ✓ | ✓ | | `zls` |

View File

@ -106,10 +106,14 @@
| `code_action` | Perform code action | normal: `` <space>a ``, select: `` <space>a `` |
| `buffer_picker` | Open buffer picker | normal: `` <space>b ``, select: `` <space>b `` |
| `jumplist_picker` | Open jumplist picker | normal: `` <space>j ``, select: `` <space>j `` |
| `symbol_picker` | Open symbol picker | normal: `` <space>s ``, select: `` <space>s `` |
| `symbol_picker` | Open symbol picker | |
| `syntax_symbol_picker` | Open symbol picker from syntax information | |
| `lsp_or_syntax_symbol_picker` | Open symbol picker from LSP or syntax information | normal: `` <space>s ``, select: `` <space>s `` |
| `changed_file_picker` | Open changed file picker | normal: `` <space>g ``, select: `` <space>g `` |
| `select_references_to_symbol_under_cursor` | Select symbol references | normal: `` <space>h ``, select: `` <space>h `` |
| `workspace_symbol_picker` | Open workspace symbol picker | normal: `` <space>S ``, select: `` <space>S `` |
| `workspace_symbol_picker` | Open workspace symbol picker | |
| `syntax_workspace_symbol_picker` | Open workspace symbol picker from syntax information | |
| `lsp_or_syntax_workspace_symbol_picker` | Open workspace symbol picker from LSP or syntax information | normal: `` <space>S ``, select: `` <space>S `` |
| `diagnostics_picker` | Open diagnostic picker | normal: `` <space>d ``, select: `` <space>d `` |
| `workspace_diagnostics_picker` | Open workspace diagnostic picker | normal: `` <space>D ``, select: `` <space>D `` |
| `last_picker` | Open last picker | normal: `` <space>' ``, select: `` <space>' `` |

View File

@ -0,0 +1,34 @@
## Adding tags queries
See tree-sitter's documentation on [Code Navigation Systems] for more
background on tags queries.
Helix provides LSP-like features such as document and workspace symbol pickers
out-of-the-box for languages with `tags.scm` queries based on syntax trees. To
be analyzed, a language must have a tree-sitter grammar and a `tags.scm` query
file which pattern matches interesting nodes from syntax trees.
Query files should be placed in `runtime/queries/{language}/tags.scm`
when contributing to Helix. You may place these under your local runtime
directory (`~/.config/helix/runtime` on Linux, for example) for the sake of
testing.
The following [captures][tree-sitter-captures] are recognized:
| Capture name |
|--- |
| `definition.class` |
| `definition.constant` |
| `definition.function` |
| `definition.interface` |
| `definition.macro` |
| `definition.module` |
| `definition.struct` |
| `definition.type` |
[Example query files][example-queries] can be found in the Helix GitHub
repository.
[Code Navigation Systems]: https://tree-sitter.github.io/tree-sitter/4-code-navigation.html
[tree-sitter-captures]: https://tree-sitter.github.io/tree-sitter/using-parsers/queries/index.html
[example-queries]: https://github.com/search?q=repo%3Ahelix-editor%2Fhelix+path%3A%2A%2A/tags.scm&type=Code

View File

@ -71,6 +71,7 @@ These configuration keys are available:
| `text-width` | Maximum line length. Used for the `:reflow` command and soft-wrapping if `soft-wrap.wrap-at-text-width` is set, defaults to `editor.text-width` |
| `rulers` | Overrides the `editor.rulers` config key for the language. |
| `path-completion` | Overrides the `editor.path-completion` config key for the language. |
| `word-completion` | Overrides the [`editor.word-completion`](./editor.md#editorword-completion-section) configuration for the language. |
| `workspace-lsp-roots` | Directories relative to the workspace root that are treated as LSP roots. Should only be set in `.helix/config.toml`. Overwrites the setting of the same name in `config.toml` if set. |
| `persistent-diagnostic-sources` | An array of LSP diagnostic sources assumed unchanged when the language server resends the same set of diagnostics. Helix can track the position for these diagnostics internally instead. Useful for diagnostics that are recomputed on save.

View File

@ -47,6 +47,9 @@
<content_rating type="oars-1.1" />
<releases>
<release version="25.07.1" date="2025-07-18">
<url>https://github.com/helix-editor/helix/releases/tag/25.07.1</url>
</release>
<release version="25.07" date="2025-07-15">
<url>https://helix-editor.com/news/release-25-07-highlights/</url>
</release>

View File

@ -16,6 +16,7 @@ pub struct CompletionItem {
pub enum CompletionProvider {
Lsp(LanguageServerId),
Path,
Word,
}
impl From<LanguageServerId> for CompletionProvider {

View File

@ -20,7 +20,10 @@ use ropey::RopeSlice;
use tree_house::{
highlighter,
query_iter::QueryIter,
tree_sitter::{Grammar, InactiveQueryCursor, InputEdit, Node, Query, RopeInput, Tree},
tree_sitter::{
query::{InvalidPredicateError, UserPredicate},
Grammar, InactiveQueryCursor, InputEdit, Node, Query, RopeInput, Tree,
},
Error, InjectionLanguageMarker, LanguageConfig as SyntaxConfig, Layer,
};
@ -28,6 +31,7 @@ use crate::{indent::IndentQuery, tree_sitter, ChangeSet, Language};
pub use tree_house::{
highlighter::{Highlight, HighlightEvent},
query_iter::QueryIterEvent,
Error as HighlighterError, LanguageLoader, TreeCursor, TREE_SITTER_MATCH_LIMIT,
};
@ -37,6 +41,7 @@ pub struct LanguageData {
syntax: OnceCell<Option<SyntaxConfig>>,
indent_query: OnceCell<Option<IndentQuery>>,
textobject_query: OnceCell<Option<TextObjectQuery>>,
tag_query: OnceCell<Option<TagQuery>>,
}
impl LanguageData {
@ -46,6 +51,7 @@ impl LanguageData {
syntax: OnceCell::new(),
indent_query: OnceCell::new(),
textobject_query: OnceCell::new(),
tag_query: OnceCell::new(),
}
}
@ -154,6 +160,44 @@ impl LanguageData {
.as_ref()
}
/// Compiles the tags.scm query for a language.
///
/// Returns `Ok(None)` when the language ships no tags.scm query (tags
/// support is simply absent). Returns an error when the query text exists
/// but fails to compile.
///
/// This function should only be used by this module or the xtask crate.
pub fn compile_tag_query(
    grammar: Grammar,
    config: &LanguageConfiguration,
) -> Result<Option<TagQuery>> {
    let name = &config.language_id;
    let text = read_query(name, "tags.scm");
    if text.is_empty() {
        return Ok(None);
    }
    let query = Query::new(grammar, &text, |_pattern, predicate| match predicate {
        // TODO: these predicates are allowed in tags.scm queries but not yet used.
        UserPredicate::IsPropertySet { key: "local", .. } => Ok(()),
        UserPredicate::Other(pred) => match pred.name() {
            "strip!" | "select-adjacent!" => Ok(()),
            _ => Err(InvalidPredicateError::unknown(predicate)),
        },
        // Any other predicate is an authoring error in the query file.
        _ => Err(InvalidPredicateError::unknown(predicate)),
    })
    .with_context(|| format!("Failed to compile tags.scm query for '{name}'"))?;
    Ok(Some(TagQuery { query }))
}
/// Returns the compiled tags.scm query for this language, if any.
///
/// The query is compiled lazily on first access and cached in the
/// `OnceCell`. Compilation errors are logged and collapse to `None`, so a
/// broken query behaves like a missing one.
fn tag_query(&self, loader: &Loader) -> Option<&TagQuery> {
    self.tag_query
        .get_or_init(|| {
            let grammar = self.syntax_config(loader)?.grammar;
            Self::compile_tag_query(grammar, &self.config)
                .map_err(|err| {
                    log::error!("{err}");
                })
                .ok()
                .flatten()
        })
        .as_ref()
}
fn reconfigure(&self, scopes: &[String]) {
if let Some(Some(config)) = self.syntax.get() {
reconfigure_highlights(config, scopes);
@ -339,6 +383,10 @@ impl Loader {
self.language(lang).textobject_query(self)
}
pub fn tag_query(&self, lang: Language) -> Option<&TagQuery> {
self.language(lang).tag_query(self)
}
pub fn language_server_configs(&self) -> &HashMap<String, LanguageServerConfiguration> {
&self.language_server_configs
}
@ -511,6 +559,19 @@ impl Syntax {
{
QueryIter::new(&self.inner, source, loader, range)
}
/// Iterates over tags.scm query matches within `range` of `source`.
///
/// Languages (including injected ones) without a tags query contribute no
/// matches since their query lookup yields `None`.
pub fn tags<'a>(
    &'a self,
    source: RopeSlice<'a>,
    loader: &'a Loader,
    range: impl RangeBounds<u32>,
) -> QueryIter<'a, 'a, impl FnMut(Language) -> Option<&'a Query> + 'a, ()> {
    self.query_iter(
        source,
        |lang| loader.tag_query(lang).map(|q| &q.query),
        range,
    )
}
}
pub type Highlighter<'a> = highlighter::Highlighter<'a, 'a, Loader>;
@ -881,6 +942,11 @@ impl TextObjectQuery {
}
}
/// A compiled tags.scm query, used for the syntax-based symbol pickers.
#[derive(Debug)]
pub struct TagQuery {
    pub query: Query,
}
pub fn pretty_print_tree<W: fmt::Write>(fmt: &mut W, node: Node) -> fmt::Result {
if node.child_count() == 0 {
if node_is_visible(&node) {

View File

@ -7,6 +7,7 @@ use serde::{ser::SerializeSeq as _, Deserialize, Serialize};
use std::{
collections::{HashMap, HashSet},
fmt::{self, Display},
num::NonZeroU8,
path::PathBuf,
str::FromStr,
};
@ -60,6 +61,8 @@ pub struct LanguageConfiguration {
/// If set, overrides `editor.path-completion`.
pub path_completion: Option<bool>,
/// If set, overrides `editor.word-completion`.
pub word_completion: Option<WordCompletion>,
#[serde(default)]
pub diagnostic_severity: Severity,
@ -572,6 +575,13 @@ pub struct SoftWrap {
pub wrap_at_text_width: Option<bool>,
}
/// Per-language word completion settings; each field, when set, overrides
/// the corresponding editor-wide `editor.word-completion` option.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
pub struct WordCompletion {
    pub enable: Option<bool>,
    // NonZeroU8 makes a zero trigger length unrepresentable in config.
    pub trigger_length: Option<NonZeroU8>,
}
fn deserialize_regex<'de, D>(deserializer: D) -> Result<Option<rope::Regex>, D::Error>
where
D: serde::Deserializer<'de>,

View File

@ -19,6 +19,16 @@ pub enum Operation {
Insert(Tendril),
}
impl Operation {
    /// The number of characters affected by the operation.
    pub fn len_chars(&self) -> usize {
        match self {
            Self::Insert(content) => content.chars().count(),
            Self::Retain(count) => *count,
            Self::Delete(count) => *count,
        }
    }
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum Assoc {
Before,

View File

@ -244,7 +244,12 @@ pub fn merge_toml_values(left: toml::Value, right: toml::Value, merge_depth: usi
/// Otherwise (workspace, false) is returned
pub fn find_workspace() -> (PathBuf, bool) {
let current_dir = current_working_dir();
for ancestor in current_dir.ancestors() {
find_workspace_in(current_dir)
}
pub fn find_workspace_in(dir: impl AsRef<Path>) -> (PathBuf, bool) {
let dir = dir.as_ref();
for ancestor in dir.ancestors() {
if ancestor.join(".git").exists()
|| ancestor.join(".svn").exists()
|| ancestor.join(".jj").exists()
@ -254,7 +259,7 @@ pub fn find_workspace() -> (PathBuf, bool) {
}
}
(current_dir, true)
(dir.to_owned(), true)
}
fn default_config_file() -> PathBuf {

View File

@ -91,6 +91,8 @@ serde = { version = "1.0", features = ["derive"] }
grep-regex = "0.1.13"
grep-searcher = "0.1.14"
dashmap = "6.0"
[target.'cfg(not(windows))'.dependencies] # https://github.com/vorner/signal-hook/issues/100
signal-hook-tokio = { version = "0.3", features = ["futures-v0_3"] }
libc = "0.2.174"

View File

@ -1,5 +1,6 @@
pub(crate) mod dap;
pub(crate) mod lsp;
pub(crate) mod syntax;
pub(crate) mod typed;
pub use dap::*;
@ -11,6 +12,7 @@ use helix_stdx::{
};
use helix_vcs::{FileChange, Hunk};
pub use lsp::*;
pub use syntax::*;
use tui::{
text::{Span, Spans},
widgets::Cell,
@ -405,9 +407,13 @@ impl MappableCommand {
buffer_picker, "Open buffer picker",
jumplist_picker, "Open jumplist picker",
symbol_picker, "Open symbol picker",
syntax_symbol_picker, "Open symbol picker from syntax information",
lsp_or_syntax_symbol_picker, "Open symbol picker from LSP or syntax information",
changed_file_picker, "Open changed file picker",
select_references_to_symbol_under_cursor, "Select symbol references",
workspace_symbol_picker, "Open workspace symbol picker",
syntax_workspace_symbol_picker, "Open workspace symbol picker from syntax information",
lsp_or_syntax_workspace_symbol_picker, "Open workspace symbol picker from LSP or syntax information",
diagnostics_picker, "Open diagnostic picker",
workspace_diagnostics_picker, "Open workspace diagnostic picker",
last_picker, "Open last picker",
@ -6835,3 +6841,34 @@ fn jump_to_word(cx: &mut Context, behaviour: Movement) {
}
jump_to_label(cx, words, behaviour)
}
/// Opens the LSP document symbol picker when a capable language server is
/// attached, otherwise falls back to the tree-sitter tags-based picker.
fn lsp_or_syntax_symbol_picker(cx: &mut Context) {
    let (has_lsp_symbols, has_syntax) = {
        let doc = doc!(cx.editor);
        (
            doc.language_servers_with_feature(LanguageServerFeature::DocumentSymbols)
                .next()
                .is_some(),
            doc.syntax().is_some(),
        )
    };
    if has_lsp_symbols {
        lsp::symbol_picker(cx);
    } else if has_syntax {
        syntax_symbol_picker(cx);
    } else {
        cx.editor
            .set_error("No language server supporting document symbols or syntax info available");
    }
}
/// Opens the LSP workspace symbol picker when a capable language server is
/// attached, otherwise falls back to the syntax-based workspace picker.
fn lsp_or_syntax_workspace_symbol_picker(cx: &mut Context) {
    let supports_workspace_symbols = doc!(cx.editor)
        .language_servers_with_feature(LanguageServerFeature::WorkspaceSymbols)
        .next()
        .is_some();
    if supports_workspace_symbols {
        lsp::workspace_symbol_picker(cx);
    } else {
        // The syntax-based picker searches files on disk, so it needs no
        // syntax tree on the current document.
        syntax_workspace_symbol_picker(cx);
    }
}

View File

@ -0,0 +1,446 @@
use std::{
collections::HashSet,
iter,
path::{Path, PathBuf},
sync::Arc,
};
use dashmap::DashMap;
use futures_util::FutureExt;
use grep_regex::RegexMatcherBuilder;
use grep_searcher::{sinks, BinaryDetection, SearcherBuilder};
use helix_core::{
syntax::{Loader, QueryIterEvent},
Rope, RopeSlice, Selection, Syntax, Uri,
};
use helix_stdx::{
path,
rope::{self, RopeSliceExt},
};
use helix_view::{
align_view,
document::{from_reader, SCRATCH_BUFFER_NAME},
Align, Document, DocumentId, Editor,
};
use ignore::{DirEntry, WalkBuilder, WalkState};
use crate::{
filter_picker_entry,
ui::{
overlay::overlaid,
picker::{Injector, PathOrId},
Picker, PickerColumn,
},
};
use super::Context;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum TagKind {
    Class,
    Constant,
    Function,
    Interface,
    Macro,
    Module,
    Struct,
    Type,
}

impl TagKind {
    /// Every variant, in display order, used to map between kinds and names.
    const ALL: [Self; 8] = [
        Self::Class,
        Self::Constant,
        Self::Function,
        Self::Interface,
        Self::Macro,
        Self::Module,
        Self::Struct,
        Self::Type,
    ];

    /// The lowercase display name shown in the picker's "kind" column.
    fn as_str(&self) -> &'static str {
        match self {
            Self::Class => "class",
            Self::Constant => "constant",
            Self::Function => "function",
            Self::Interface => "interface",
            Self::Macro => "macro",
            Self::Module => "module",
            Self::Struct => "struct",
            Self::Type => "type",
        }
    }

    /// Parses a kind from a capture-name suffix such as `"function"`.
    fn from_name(name: &str) -> Option<Self> {
        Self::ALL.iter().copied().find(|kind| kind.as_str() == name)
    }
}
// NOTE: Uri is cheap to clone and DocumentId is Copy
/// Identifies where a tag came from: an on-disk file (`Uri`) or an open
/// buffer (`Id`, used for scratch buffers which have no path).
#[derive(Debug, Clone)]
enum UriOrDocumentId {
    Uri(Uri),
    Id(DocumentId),
}

impl UriOrDocumentId {
    /// Converts to the picker's preview locator; `None` for a URI that is
    /// not backed by a filesystem path.
    fn path_or_id(&self) -> Option<PathOrId<'_>> {
        match self {
            Self::Id(id) => Some(PathOrId::Id(*id)),
            Self::Uri(uri) => uri.as_path().map(PathOrId::Path),
        }
    }
}
/// A single definition found by a tags.scm query.
#[derive(Debug)]
struct Tag {
    kind: TagKind,
    // The matched node's text, shown in the picker's "name" column.
    name: String,
    // Char-indexed range used for the jump selection.
    start: usize,
    end: usize,
    // Line range used for the picker preview.
    start_line: usize,
    end_line: usize,
    doc: UriOrDocumentId,
}
/// Lazily yields the tags found in `text` by the language's tags.scm query.
///
/// Only captures named `definition.<kind>` with a recognized `<kind>` are
/// emitted. When `pattern` is given, matches whose node text does not match
/// the regex are skipped (used by the workspace picker to pre-filter).
fn tags_iter<'a>(
    syntax: &'a Syntax,
    loader: &'a Loader,
    text: RopeSlice<'a>,
    doc: UriOrDocumentId,
    pattern: Option<&'a rope::Regex>,
) -> impl Iterator<Item = Tag> + 'a {
    let mut tags_iter = syntax.tags(text, loader, ..);
    iter::from_fn(move || loop {
        // Skip non-match events; `None` from the query iter ends this iterator.
        let QueryIterEvent::Match(mat) = tags_iter.next()? else {
            continue;
        };
        // A match can only come from a language that has a tags query.
        let query = &loader
            .tag_query(tags_iter.current_language())
            .expect("must have a tags query to emit matches")
            .query;
        let Some(kind) = query
            .capture_name(mat.capture)
            .strip_prefix("definition.")
            .and_then(TagKind::from_name)
        else {
            continue;
        };
        let range = mat.node.byte_range();
        if pattern.is_some_and(|pattern| {
            !pattern.is_match(text.regex_input_at_bytes(range.start as usize..range.end as usize))
        }) {
            continue;
        }
        // Convert the node's byte range to char and line coordinates.
        let start = text.byte_to_char(range.start as usize);
        let end = text.byte_to_char(range.end as usize);
        return Some(Tag {
            kind,
            name: text.slice(start..end).to_string(),
            start,
            end,
            start_line: text.char_to_line(start),
            end_line: text.char_to_line(end),
            doc: doc.clone(),
        });
    })
}
/// Opens a picker of the current document's symbols, sourced from the
/// tree-sitter tags.scm query rather than a language server.
///
/// Shows an error on the statusline when the buffer has no syntax tree.
pub fn syntax_symbol_picker(cx: &mut Context) {
    let doc = doc!(cx.editor);
    let Some(syntax) = doc.syntax() else {
        cx.editor
            .set_error("Syntax tree is not available on this buffer");
        return;
    };
    let doc_id = doc.id();
    let text = doc.text().slice(..);
    let loader = cx.editor.syn_loader.load();
    // No pattern filter: the picker itself filters interactively.
    // (Reuse `doc_id` instead of calling `doc.id()` a second time.)
    let tags = tags_iter(syntax, &loader, text, UriOrDocumentId::Id(doc_id), None);

    let columns = vec![
        PickerColumn::new("kind", |tag: &Tag, _| tag.kind.as_str().into()),
        PickerColumn::new("name", |tag: &Tag, _| tag.name.as_str().into()),
    ];

    let picker = Picker::new(
        columns,
        1, // name
        tags,
        (),
        move |cx, tag, action| {
            // Jump to the tag: focus the document and select its range.
            cx.editor.switch(doc_id, action);
            let view = view_mut!(cx.editor);
            let doc = doc_mut!(cx.editor, &doc_id);
            doc.set_selection(view.id, Selection::single(tag.start, tag.end));
            if action.align_view(view, doc.id()) {
                align_view(doc, view, Align::Center)
            }
        },
    )
    .with_preview(|_editor, tag| {
        Some((tag.doc.path_or_id()?, Some((tag.start_line, tag.end_line))))
    })
    .truncate_start(false);

    cx.push_layer(Box::new(overlaid(picker)));
}
/// Opens a dynamic picker of symbols across the workspace, sourced from
/// tree-sitter tags.scm queries.
///
/// Open documents are queried directly. On-disk files are first narrowed
/// with a parallel grep for the typed pattern; only files containing a
/// regex match are parsed and queried, and parse results are cached in
/// `syntax_cache` across successive queries.
pub fn syntax_workspace_symbol_picker(cx: &mut Context) {
    #[derive(Debug)]
    struct SearchState {
        searcher_builder: SearcherBuilder,
        walk_builder: WalkBuilder,
        regex_matcher_builder: RegexMatcherBuilder,
        rope_regex_builder: rope::RegexBuilder,
        search_root: PathBuf,
        /// A cache of files that have been parsed in prior searches.
        syntax_cache: DashMap<PathBuf, Option<(Rope, Syntax)>>,
    }

    let mut searcher_builder = SearcherBuilder::new();
    searcher_builder.binary_detection(BinaryDetection::quit(b'\x00'));

    // Search from the workspace that the currently focused document is within. This behaves like global
    // search most of the time but helps when you have two projects open in splits.
    let search_root = if let Some(path) = doc!(cx.editor).path() {
        helix_loader::find_workspace_in(path).0
    } else {
        helix_loader::find_workspace().0
    };

    let absolute_root = search_root
        .canonicalize()
        .unwrap_or_else(|_| search_root.clone());

    let config = cx.editor.config();
    let dedup_symlinks = config.file_picker.deduplicate_links;

    let mut walk_builder = WalkBuilder::new(&search_root);
    walk_builder
        .hidden(config.file_picker.hidden)
        .parents(config.file_picker.parents)
        .ignore(config.file_picker.ignore)
        .follow_links(config.file_picker.follow_symlinks)
        .git_ignore(config.file_picker.git_ignore)
        .git_global(config.file_picker.git_global)
        .git_exclude(config.file_picker.git_exclude)
        .max_depth(config.file_picker.max_depth)
        .filter_entry(move |entry| filter_picker_entry(entry, &absolute_root, dedup_symlinks))
        .add_custom_ignore_filename(helix_loader::config_dir().join("ignore"))
        .add_custom_ignore_filename(".helix/ignore");

    let mut regex_matcher_builder = RegexMatcherBuilder::new();
    regex_matcher_builder.case_smart(config.search.smart_case);
    let mut rope_regex_builder = rope::RegexBuilder::new();
    rope_regex_builder.syntax(rope::Config::new().case_insensitive(config.search.smart_case));
    let state = SearchState {
        searcher_builder,
        walk_builder,
        regex_matcher_builder,
        rope_regex_builder,
        search_root,
        syntax_cache: DashMap::default(),
    };
    let reg = cx.register.unwrap_or('/');
    cx.editor.registers.last_search_register = reg;

    let columns = vec![
        PickerColumn::new("kind", |tag: &Tag, _| tag.kind.as_str().into()),
        PickerColumn::new("name", |tag: &Tag, _| tag.name.as_str().into()).without_filtering(),
        PickerColumn::new("path", |tag: &Tag, state: &SearchState| {
            match &tag.doc {
                UriOrDocumentId::Uri(uri) => {
                    if let Some(path) = uri.as_path() {
                        // Show paths relative to the search root when possible.
                        let path = if let Ok(stripped) = path.strip_prefix(&state.search_root) {
                            stripped
                        } else {
                            path
                        };
                        path.to_string_lossy().into()
                    } else {
                        uri.to_string().into()
                    }
                }
                // This picker only uses `Id` for scratch buffers for better display.
                UriOrDocumentId::Id(_) => SCRATCH_BUFFER_NAME.into(),
            }
        }),
    ];

    let get_tags = |query: &str,
                    editor: &mut Editor,
                    state: Arc<SearchState>,
                    injector: &Injector<_, _>| {
        // Very short patterns would match nearly everything; wait for input.
        if query.len() < 3 {
            return async { Ok(()) }.boxed();
        }

        // Attempt to find the tag in any open documents.
        let pattern = match state.rope_regex_builder.build(query) {
            Ok(pattern) => pattern,
            Err(err) => return async { Err(anyhow::anyhow!(err)) }.boxed(),
        };
        let loader = editor.syn_loader.load();
        for doc in editor.documents() {
            let Some(syntax) = doc.syntax() else { continue };
            let text = doc.text().slice(..);
            let uri_or_id = doc
                .uri()
                .map(UriOrDocumentId::Uri)
                .unwrap_or_else(|| UriOrDocumentId::Id(doc.id()));
            for tag in tags_iter(syntax, &loader, text.slice(..), uri_or_id, Some(&pattern)) {
                if injector.push(tag).is_err() {
                    return async { Ok(()) }.boxed();
                }
            }
        }

        if !state.search_root.exists() {
            return async { Err(anyhow::anyhow!("Current working directory does not exist")) }
                .boxed();
        }

        let matcher = match state.regex_matcher_builder.build(query) {
            Ok(matcher) => {
                // Clear any "Failed to compile regex" errors out of the statusline.
                editor.clear_status();
                matcher
            }
            Err(err) => {
                log::info!(
                    "Failed to compile search pattern in workspace symbol search: {}",
                    err
                );
                return async { Err(anyhow::anyhow!("Failed to compile regex")) }.boxed();
            }
        };

        let pattern = Arc::new(pattern);
        let injector = injector.clone();
        let loader = editor.syn_loader.load();
        // Paths of open documents, which were already searched above.
        let documents: HashSet<_> = editor
            .documents()
            .filter_map(Document::path)
            .cloned()
            .collect();
        async move {
            let searcher = state.searcher_builder.build();
            state.walk_builder.build_parallel().run(|| {
                let mut searcher = searcher.clone();
                let matcher = matcher.clone();
                let injector = injector.clone();
                let loader = loader.clone();
                let documents = &documents;
                let pattern = pattern.clone();
                let syntax_cache = &state.syntax_cache;

                Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
                    let entry = match entry {
                        Ok(entry) => entry,
                        Err(_) => return WalkState::Continue,
                    };

                    match entry.file_type() {
                        Some(entry) if entry.is_file() => {}
                        // skip everything else
                        _ => return WalkState::Continue,
                    };

                    let path = entry.path();
                    // If this document is open, skip it because we've already processed it above.
                    if documents.contains(path) {
                        return WalkState::Continue;
                    }

                    let mut quit = false;
                    let sink = sinks::UTF8(|_line, _content| {
                        if !syntax_cache.contains_key(path) {
                            // Read the file into a Rope and attempt to recognize the language
                            // and parse it with tree-sitter. Save the Rope and Syntax for future
                            // queries.
                            syntax_cache.insert(path.to_path_buf(), syntax_for_path(path, &loader));
                        }
                        let entry = syntax_cache.get(path).unwrap();
                        let Some((text, syntax)) = entry.value() else {
                            // If the file couldn't be parsed, move on.
                            return Ok(false);
                        };
                        let uri = Uri::from(path::normalize(path));
                        for tag in tags_iter(
                            syntax,
                            &loader,
                            text.slice(..),
                            UriOrDocumentId::Uri(uri),
                            Some(&pattern),
                        ) {
                            if injector.push(tag).is_err() {
                                quit = true;
                                break;
                            }
                        }
                        // Quit after seeing the first regex match. We only care to find files
                        // that contain the pattern and then we run the tags query within
                        // those. The location and contents of a match are irrelevant - it's
                        // only important _if_ a file matches.
                        Ok(false)
                    });

                    if let Err(err) = searcher.search_path(&matcher, path, sink) {
                        log::info!("Workspace syntax search error: {}, {}", path.display(), err);
                    }

                    if quit {
                        WalkState::Quit
                    } else {
                        WalkState::Continue
                    }
                })
            });
            Ok(())
        }
        .boxed()
    };

    let picker = Picker::new(
        columns,
        1, // name
        [],
        state,
        move |cx, tag, action| {
            let doc_id = match &tag.doc {
                UriOrDocumentId::Id(id) => *id,
                UriOrDocumentId::Uri(uri) => {
                    // URIs in this picker are always built from on-disk paths
                    // (via `Uri::from(path::normalize(..))` above), so the
                    // conversion back to a path cannot fail.
                    let path = uri
                        .as_path()
                        .expect("workspace tag URIs are always path-backed");
                    match cx.editor.open(path, action) {
                        Ok(id) => id,
                        Err(e) => {
                            cx.editor
                                .set_error(format!("Failed to open file '{uri:?}': {e}"));
                            return;
                        }
                    }
                }
            };
            let doc = doc_mut!(cx.editor, &doc_id);
            let view = view_mut!(cx.editor);
            let len_chars = doc.text().len_chars();
            // The file may have changed since it was indexed; never jump to
            // an out-of-bounds location.
            if tag.start >= len_chars || tag.end > len_chars {
                cx.editor.set_error("The location you jumped to does not exist anymore because the file has changed.");
                return;
            }
            doc.set_selection(view.id, Selection::single(tag.start, tag.end));
            if action.align_view(view, doc.id()) {
                align_view(doc, view, Align::Center)
            }
        },
    )
    .with_dynamic_query(get_tags, Some(275))
    .with_preview(move |_editor, tag| {
        Some((
            tag.doc.path_or_id()?,
            Some((tag.start_line, tag.end_line)),
        ))
    })
    .truncate_start(false);

    cx.push_layer(Box::new(overlaid(picker)));
}
/// Create a Rope and language config for a given existing path without
/// creating a full Document.
///
/// Returns `None` when the file cannot be opened or decoded, when no
/// language matches its filename or shebang, or when parsing fails.
fn syntax_for_path(path: &Path, loader: &Loader) -> Option<(Rope, Syntax)> {
    let mut file = std::fs::File::open(path).ok()?;
    let (rope, _encoding, _has_bom) = from_reader(&mut file, None).ok()?;
    let slice = rope.slice(..);
    // Prefer filename-based detection, falling back to the shebang line.
    let language = match loader.language_for_filename(path) {
        Some(language) => language,
        None => loader.language_for_shebang(slice)?,
    };
    let syntax = Syntax::new(slice, language, loader).ok()?;
    Some((rope, syntax))
}

View File

@ -8,7 +8,7 @@ use crate::events;
use crate::handlers::auto_save::AutoSaveHandler;
use crate::handlers::signature_help::SignatureHelpHandler;
pub use helix_view::handlers::Handlers;
pub use helix_view::handlers::{word_index, Handlers};
use self::document_colors::DocumentColorsHandler;
@ -26,12 +26,14 @@ pub fn setup(config: Arc<ArcSwap<Config>>) -> Handlers {
let signature_hints = SignatureHelpHandler::new().spawn();
let auto_save = AutoSaveHandler::new().spawn();
let document_colors = DocumentColorsHandler::default().spawn();
let word_index = word_index::Handler::spawn();
let handlers = Handlers {
completions: helix_view::handlers::completion::CompletionHandler::new(event_tx),
signature_hints,
auto_save,
document_colors,
word_index,
};
helix_view::handlers::register_hooks(&handlers);

View File

@ -30,6 +30,7 @@ mod item;
mod path;
mod request;
mod resolve;
mod word;
async fn handle_response(
requests: &mut JoinSet<CompletionResponse>,
@ -82,7 +83,7 @@ async fn replace_completions(
fn show_completion(
editor: &mut Editor,
compositor: &mut Compositor,
items: Vec<CompletionItem>,
mut items: Vec<CompletionItem>,
context: HashMap<CompletionProvider, ResponseContext>,
trigger: Trigger,
) {
@ -101,6 +102,7 @@ fn show_completion(
if ui.completion.is_some() {
return;
}
word::retain_valid_completions(trigger, doc, view.id, &mut items);
editor.handlers.completions.active_completions = context;
let completion_area = ui.set_completion(editor, items, trigger.pos, size);

View File

@ -28,6 +28,8 @@ use crate::job::{dispatch, dispatch_blocking};
use crate::ui;
use crate::ui::editor::InsertEvent;
use super::word;
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub(super) enum TriggerKind {
Auto,
@ -242,10 +244,15 @@ fn request_completions(
doc.selection(view.id).clone(),
doc,
handle.clone(),
savepoint,
savepoint.clone(),
) {
requests.spawn_blocking(path_completion_request);
}
if let Some(word_completion_request) =
word::completion(editor, trigger, handle.clone(), savepoint)
{
requests.spawn_blocking(word_completion_request);
}
let ui = compositor.find::<ui::EditorView>().unwrap();
ui.last_insert.1.push(InsertEvent::RequestCompletion);

View File

@ -0,0 +1,134 @@
use std::{borrow::Cow, sync::Arc};
use helix_core::{
self as core, chars::char_is_word, completion::CompletionProvider, movement, Transaction,
};
use helix_event::TaskHandle;
use helix_stdx::rope::RopeSliceExt as _;
use helix_view::{
document::SavePoint, handlers::completion::ResponseContext, Document, Editor, ViewId,
};
use super::{request::TriggerKind, CompletionItem, CompletionItems, CompletionResponse, Trigger};
const COMPLETION_KIND: &str = "word";
/// Builds a word-completion request for the current cursor position.
///
/// Returns `None` when word completion is disabled for the document, when
/// the cursor is not preceded by a word, when fewer than the configured
/// trigger length of word characters were typed (automatic triggers only),
/// or when `handle` was already canceled. The returned closure performs the
/// word-index lookup and is intended to run on a blocking task.
pub(super) fn completion(
    editor: &Editor,
    trigger: Trigger,
    handle: TaskHandle,
    savepoint: Arc<SavePoint>,
) -> Option<impl FnOnce() -> CompletionResponse> {
    if !doc!(editor).word_completion_enabled() {
        return None;
    }

    // Per-language config overrides the editor-wide trigger length.
    let config = editor.config().word_completion;
    let doc_config = doc!(editor)
        .language_config()
        .and_then(|config| config.word_completion);
    let trigger_length = doc_config
        .and_then(|c| c.trigger_length)
        .unwrap_or(config.trigger_length)
        .get() as usize;

    let (view, doc) = current_ref!(editor);
    let rope = doc.text().clone();
    let word_index = editor.handlers.word_index().clone();
    let text = doc.text().slice(..);
    let selection = doc.selection(view.id).clone();
    let pos = selection.primary().cursor(text);

    // Find the start of the word being typed; bail if there is none.
    let cursor = movement::move_prev_word_start(text, core::Range::point(pos), 1);
    if cursor.head == pos {
        return None;
    }
    // For automatic triggers, require at least `trigger_length` consecutive
    // word-character graphemes after the word start.
    if trigger.kind != TriggerKind::Manual
        && text
            .slice(cursor.head..)
            .graphemes()
            .take(trigger_length)
            .take_while(|g| g.chars().all(char_is_word))
            .count()
            != trigger_length
    {
        return None;
    }

    let typed_word_range = cursor.head..pos;
    let typed_word = text.slice(typed_word_range.clone());
    // Number of chars the completion transaction should replace before the
    // cursor; zero when the "word" ends in whitespace (nothing to replace).
    let edit_diff = if typed_word
        .char(typed_word.len_chars().saturating_sub(1))
        .is_whitespace()
    {
        0
    } else {
        typed_word.len_chars()
    };

    if handle.is_canceled() {
        return None;
    }

    // Everything below runs later on a blocking task; it owns clones of the
    // rope, selection, and word index so the editor can move on.
    let future = move || {
        let text = rope.slice(..);
        let typed_word: Cow<_> = text.slice(typed_word_range).into();
        let items = word_index
            .matches(&typed_word)
            .into_iter()
            // Don't suggest the word the user already typed in full.
            .filter(|word| word.as_str() != typed_word.as_ref())
            .map(|word| {
                let transaction = Transaction::change_by_selection(&rope, &selection, |range| {
                    let cursor = range.cursor(text);
                    (cursor - edit_diff, cursor, Some((&word).into()))
                });
                CompletionItem::Other(core::CompletionItem {
                    transaction,
                    label: word.into(),
                    kind: Cow::Borrowed(COMPLETION_KIND),
                    documentation: None,
                    provider: CompletionProvider::Word,
                })
            })
            .collect();
        CompletionResponse {
            items: CompletionItems::Other(items),
            provider: CompletionProvider::Word,
            context: ResponseContext {
                is_incomplete: false,
                priority: 0,
                savepoint,
            },
        }
    };
    Some(future)
}
/// Drops word-completion items that are no longer applicable.
///
/// Manual triggers keep everything; for automatic triggers, word items are
/// removed once the character before the cursor is whitespace (the word
/// being completed has ended).
pub(super) fn retain_valid_completions(
    trigger: Trigger,
    doc: &Document,
    view_id: ViewId,
    items: &mut Vec<CompletionItem>,
) {
    if trigger.kind == TriggerKind::Manual {
        return;
    }

    let text = doc.text().slice(..);
    let cursor = doc.selection(view_id).primary().cursor(text);
    let after_whitespace = text
        .get_char(cursor.saturating_sub(1))
        .is_some_and(char::is_whitespace);
    if !after_whitespace {
        return;
    }

    items.retain(|item| {
        !matches!(
            item,
            CompletionItem::Other(core::CompletionItem {
                provider: CompletionProvider::Word,
                ..
            })
        )
    });
}

View File

@ -12,11 +12,17 @@ pub enum TsFeature {
Highlight,
TextObject,
AutoIndent,
Tags,
}
impl TsFeature {
pub fn all() -> &'static [Self] {
&[Self::Highlight, Self::TextObject, Self::AutoIndent]
&[
Self::Highlight,
Self::TextObject,
Self::AutoIndent,
Self::Tags,
]
}
pub fn runtime_filename(&self) -> &'static str {
@ -24,6 +30,7 @@ impl TsFeature {
Self::Highlight => "highlights.scm",
Self::TextObject => "textobjects.scm",
Self::AutoIndent => "indents.scm",
Self::Tags => "tags.scm",
}
}
@ -32,6 +39,7 @@ impl TsFeature {
Self::Highlight => "Syntax Highlighting",
Self::TextObject => "Treesitter Textobjects",
Self::AutoIndent => "Auto Indent",
Self::Tags => "Code Navigation Tags",
}
}
@ -40,6 +48,7 @@ impl TsFeature {
Self::Highlight => "Highlight",
Self::TextObject => "Textobject",
Self::AutoIndent => "Indent",
Self::Tags => "Tags",
}
}
}

View File

@ -229,8 +229,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
"E" => file_explorer_in_current_buffer_directory,
"b" => buffer_picker,
"j" => jumplist_picker,
"s" => symbol_picker,
"S" => workspace_symbol_picker,
"s" => lsp_or_syntax_symbol_picker,
"S" => lsp_or_syntax_workspace_symbol_picker,
"d" => diagnostics_picker,
"D" => workspace_diagnostics_picker,
"g" => changed_file_picker,

View File

@ -7,13 +7,15 @@ use helix_core::syntax::{self, HighlightEvent, Highlighter, OverlayHighlights};
use helix_core::text_annotations::TextAnnotations;
use helix_core::{visual_offset_from_block, Position, RopeSlice};
use helix_stdx::rope::RopeSliceExt;
use helix_view::editor::{WhitespaceConfig, WhitespaceRenderValue};
use helix_view::editor::WhitespaceFeature;
use helix_view::graphics::Rect;
use helix_view::theme::Style;
use helix_view::view::ViewPosition;
use helix_view::{Document, Theme};
use tui::buffer::Buffer as Surface;
use super::trailing_whitespace::{TrailingWhitespaceTracker, WhitespaceKind};
use crate::ui::text_decorations::DecorationManager;
#[derive(Debug, PartialEq, Eq, Copy, Clone)]
@ -177,6 +179,7 @@ pub struct TextRenderer<'a> {
surface: &'a mut Surface,
pub text_style: Style,
pub whitespace_style: Style,
pub trailing_whitespace_style: Style,
pub indent_guide_char: String,
pub indent_guide_style: Style,
pub newline: String,
@ -190,6 +193,7 @@ pub struct TextRenderer<'a> {
pub draw_indent_guides: bool,
pub viewport: Rect,
pub offset: Position,
pub trailing_whitespace_tracker: TrailingWhitespaceTracker,
}
pub struct GraphemeStyle {
@ -206,56 +210,27 @@ impl<'a> TextRenderer<'a> {
viewport: Rect,
) -> TextRenderer<'a> {
let editor_config = doc.config.load();
let WhitespaceConfig {
render: ws_render,
characters: ws_chars,
} = &editor_config.whitespace;
let tab_width = doc.tab_width();
let tab = if ws_render.tab() == WhitespaceRenderValue::All {
std::iter::once(ws_chars.tab)
.chain(std::iter::repeat(ws_chars.tabpad).take(tab_width - 1))
.collect()
} else {
" ".repeat(tab_width)
};
let virtual_tab = " ".repeat(tab_width);
let newline = if ws_render.newline() == WhitespaceRenderValue::All {
ws_chars.newline.into()
} else {
" ".to_owned()
};
let space = if ws_render.space() == WhitespaceRenderValue::All {
ws_chars.space.into()
} else {
" ".to_owned()
};
let nbsp = if ws_render.nbsp() == WhitespaceRenderValue::All {
ws_chars.nbsp.into()
} else {
" ".to_owned()
};
let nnbsp = if ws_render.nnbsp() == WhitespaceRenderValue::All {
ws_chars.nnbsp.into()
} else {
" ".to_owned()
};
let text_style = theme.get("ui.text");
let indent_width = doc.indent_style.indent_width(tab_width) as u16;
let ws = &editor_config.whitespace;
let regular_ws = WhitespaceFeature::Regular.palette(ws, tab_width);
let trailing_ws = WhitespaceFeature::Trailing.palette(ws, tab_width);
let trailing_whitespace_tracker = TrailingWhitespaceTracker::new(ws.render, trailing_ws);
TextRenderer {
surface,
indent_guide_char: editor_config.indent_guides.character.into(),
newline,
nbsp,
nnbsp,
space,
tab,
virtual_tab,
newline: regular_ws.newline,
nbsp: regular_ws.nbsp,
nnbsp: regular_ws.nnbsp,
space: regular_ws.space,
tab: regular_ws.tab,
virtual_tab: regular_ws.virtual_tab,
whitespace_style: theme.get("ui.virtual.whitespace"),
trailing_whitespace_style: theme.get("ui.virtual.trailing_whitespace"),
indent_width,
starting_indent: offset.col / indent_width as usize
+ (offset.col % indent_width as usize != 0) as usize
@ -269,6 +244,7 @@ impl<'a> TextRenderer<'a> {
draw_indent_guides: editor_config.indent_guides.render,
viewport,
offset,
trailing_whitespace_tracker,
}
}
/// Draws a single `grapheme` at the current render position with a specified `style`.
@ -343,28 +319,61 @@ impl<'a> TextRenderer<'a> {
} else {
&self.tab
};
let mut whitespace_kind = WhitespaceKind::None;
let grapheme = match grapheme {
Grapheme::Tab { width } => {
whitespace_kind = WhitespaceKind::Tab;
let grapheme_tab_width = char_to_byte_idx(tab, width);
&tab[..grapheme_tab_width]
}
// TODO special rendering for other whitespaces?
Grapheme::Other { ref g } if g == " " => space,
Grapheme::Other { ref g } if g == "\u{00A0}" => nbsp,
Grapheme::Other { ref g } if g == "\u{202F}" => nnbsp,
Grapheme::Other { ref g } if g == " " => {
whitespace_kind = WhitespaceKind::Space;
space
}
Grapheme::Other { ref g } if g == "\u{00A0}" => {
whitespace_kind = WhitespaceKind::NonBreakingSpace;
nbsp
}
Grapheme::Other { ref g } if g == "\u{202F}" => {
whitespace_kind = WhitespaceKind::NarrowNonBreakingSpace;
nnbsp
}
Grapheme::Other { ref g } => g,
Grapheme::Newline => &self.newline,
Grapheme::Newline => {
whitespace_kind = WhitespaceKind::Newline;
&self.newline
}
};
let viewport_right_edge = self.viewport.width as usize + self.offset.col - 1;
let in_bounds = self.column_in_bounds(position.col, width);
if in_bounds {
let in_bounds_col = position.col - self.offset.col;
self.surface.set_string(
self.viewport.x + (position.col - self.offset.col) as u16,
self.viewport.x + in_bounds_col as u16,
self.viewport.y + position.row as u16,
grapheme,
style,
);
if self
.trailing_whitespace_tracker
.track(in_bounds_col, whitespace_kind)
|| position.col == viewport_right_edge
{
self.trailing_whitespace_tracker.render(
&mut |trailing_whitespace: &str, from: usize| {
self.surface.set_string(
self.viewport.x + from as u16,
self.viewport.y + position.row as u16,
trailing_whitespace,
style.patch(self.trailing_whitespace_style),
);
},
);
}
} else if cut_off_start != 0 && cut_off_start < width {
// partially on screen
let rect = Rect::new(

View File

@ -13,6 +13,7 @@ mod spinner;
mod statusline;
mod text;
mod text_decorations;
mod trailing_whitespace;
use crate::compositor::Compositor;
use crate::filter_picker_entry;
@ -356,7 +357,7 @@ fn directory_content(path: &Path) -> Result<Vec<(PathBuf, bool)>, std::io::Error
.map(|entry| {
(
entry.path(),
entry.file_type().is_ok_and(|file_type| file_type.is_dir()),
std::fs::metadata(entry.path()).is_ok_and(|metadata| metadata.is_dir()),
)
})
.collect();

View File

@ -0,0 +1,173 @@
use helix_core::str_utils::char_to_byte_idx;
use helix_view::editor::{WhitespacePalette, WhitespaceRender, WhitespaceRenderValue};
/// The kind of whitespace grapheme encountered while rendering a line.
/// `None` marks a non-whitespace grapheme, which breaks a trailing run.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
pub enum WhitespaceKind {
    None,
    Space,
    NonBreakingSpace,
    NarrowNonBreakingSpace,
    Tab,
    Newline,
}

impl WhitespaceKind {
    /// The visible replacement string for this kind, taken from `palette`.
    /// `Newline` and `None` render as empty strings.
    pub fn to_str(self, palette: &WhitespacePalette) -> &str {
        match self {
            WhitespaceKind::Space => &palette.space,
            WhitespaceKind::NonBreakingSpace => &palette.nbsp,
            WhitespaceKind::NarrowNonBreakingSpace => &palette.nnbsp,
            WhitespaceKind::Tab => {
                // NOTE(review): `palette.tab.len()` is a byte count used as a
                // char count here, which clamps to the whole string —
                // presumably intentional to cap at the palette tab's width;
                // confirm against the renderer's tab handling.
                let grapheme_tab_width = char_to_byte_idx(&palette.tab, palette.tab.len());
                &palette.tab[..grapheme_tab_width]
            }
            WhitespaceKind::Newline | WhitespaceKind::None => "",
        }
    }
}
/// Tracks a run of whitespace while a line is rendered so that, once the
/// run turns out to be trailing (it reaches the end of the line), it can be
/// re-rendered with the trailing-whitespace style.
#[derive(Debug)]
pub struct TrailingWhitespaceTracker {
    enabled: bool,
    palette: WhitespacePalette,
    // Column at which the currently tracked whitespace run starts.
    tracking_from: usize,
    // Run-length encoded kinds making up the current run.
    tracking_content: Vec<(WhitespaceKind, usize)>,
}

impl TrailingWhitespaceTracker {
    pub fn new(render: WhitespaceRender, palette: WhitespacePalette) -> Self {
        Self {
            palette,
            // Tracking is a no-op unless trailing-whitespace rendering is on.
            enabled: render.any(WhitespaceRenderValue::Trailing),
            tracking_from: 0,
            tracking_content: vec![],
        }
    }

    // Tracks the whitespace and returns whether [`Self::render`] should be called right after
    // to display the trailing whitespace.
    pub fn track(&mut self, from: usize, kind: WhitespaceKind) -> bool {
        if !self.enabled || kind == WhitespaceKind::None {
            // A non-whitespace grapheme breaks the run.
            self.tracking_content.clear();
            return false;
        }
        if kind == WhitespaceKind::Newline {
            // End of line: whatever was tracked is trailing.
            return true;
        }
        if self.tracking_content.is_empty() {
            self.tracking_from = from;
        }
        self.compress(kind);
        false
    }

    // Replays the tracked run through `callback(rendered_str, column)` and
    // resets the tracker.
    pub fn render(&mut self, callback: &mut impl FnMut(&str, usize)) {
        if self.tracking_content.is_empty() {
            return;
        }
        let mut offset = self.tracking_from;
        self.tracking_content.iter().for_each(|(kind, n)| {
            let ws = kind.to_str(&self.palette).repeat(*n);
            callback(&ws, offset);
            offset += n;
        });
        self.tracking_content.clear();
    }

    // Run-length encodes `kind` onto the end of the current run.
    fn compress(&mut self, kind: WhitespaceKind) {
        if let Some((last_kind, n)) = self.tracking_content.last_mut() {
            if *last_kind == kind {
                *n += 1;
                return;
            }
        }
        self.tracking_content.push((kind, 1));
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use helix_view::editor::WhitespaceRender;

    /// A palette with distinct single-letter markers so assertions can tell
    /// which whitespace kind produced each rendered character.
    fn palette() -> WhitespacePalette {
        WhitespacePalette {
            space: "S".into(),
            nbsp: "N".into(),
            nnbsp: "M".into(),
            tab: "<TAB>".into(),
            virtual_tab: "V".into(),
            newline: "L".into(),
        }
    }

    /// Drives `render` and captures the concatenated output plus the first and
    /// last offsets handed to the callback.
    fn capture(sut: &mut TrailingWhitespaceTracker) -> (String, usize, usize) {
        let mut captured_content = String::new();
        let mut from: usize = 0;
        let mut to: usize = 0;

        sut.render(&mut |content: &str, pos: usize| {
            captured_content.push_str(content);
            if from == 0 {
                from = pos;
            }
            to = pos;
        });

        (captured_content, from, to)
    }

    #[test]
    fn test_trailing_whitespace_tracker_correctly_tracks_sequences() {
        let ws_render = WhitespaceRender::Basic(WhitespaceRenderValue::Trailing);
        let mut sut = TrailingWhitespaceTracker::new(ws_render, palette());

        sut.track(5, WhitespaceKind::Space);
        sut.track(6, WhitespaceKind::NonBreakingSpace);
        sut.track(7, WhitespaceKind::NarrowNonBreakingSpace);
        sut.track(8, WhitespaceKind::Tab);

        let (content, from, to) = capture(&mut sut);
        assert_eq!(5, from);
        assert_eq!(8, to);
        assert_eq!("SNM<TAB>", content);

        // Now we break the sequence
        sut.track(6, WhitespaceKind::None);
        // After the break nothing should be rendered.
        let (content, from, to) = capture(&mut sut);
        assert_eq!(0, from);
        assert_eq!(0, to);
        assert_eq!("", content);

        sut.track(10, WhitespaceKind::Tab);
        sut.track(11, WhitespaceKind::NonBreakingSpace);
        sut.track(12, WhitespaceKind::NarrowNonBreakingSpace);
        sut.track(13, WhitespaceKind::Space);

        let (content, from, to) = capture(&mut sut);
        assert_eq!(10, from);
        assert_eq!(13, to);
        assert_eq!("<TAB>NMS", content);

        // Verify compression works
        sut.track(20, WhitespaceKind::Space);
        sut.track(21, WhitespaceKind::Space);
        sut.track(22, WhitespaceKind::NonBreakingSpace);
        sut.track(23, WhitespaceKind::NonBreakingSpace);
        sut.track(24, WhitespaceKind::NarrowNonBreakingSpace);
        sut.track(25, WhitespaceKind::NarrowNonBreakingSpace);
        sut.track(26, WhitespaceKind::Tab);
        sut.track(27, WhitespaceKind::Tab);
        sut.track(28, WhitespaceKind::Tab);

        let (content, from, to) = capture(&mut sut);
        assert_eq!(20, from);
        assert_eq!(26, to); // Compression means last tracked token is on 26 instead of 28
        assert_eq!("SSNNMM<TAB><TAB><TAB>", content);
    }
}

View File

@ -52,6 +52,8 @@ log = "~0.4"
parking_lot.workspace = true
thiserror.workspace = true
kstring = "2.0"
[target.'cfg(windows)'.dependencies]
clipboard-win = { version = "5.4", features = ["std"] }

View File

@ -1810,6 +1810,12 @@ impl Document {
self.version
}
/// Whether completion of words from open buffers is enabled for this document.
///
/// A per-language `word-completion.enable` override takes precedence when
/// present; otherwise the editor-wide `word-completion.enable` setting is used.
pub fn word_completion_enabled(&self) -> bool {
    self.language_config()
        .and_then(|lang_config| lang_config.word_completion.and_then(|c| c.enable))
        .unwrap_or_else(|| self.config.load().word_completion.enable)
}
pub fn path_completion_enabled(&self) -> bool {
self.language_config()
.and_then(|lang_config| lang_config.path_completion)

View File

@ -278,6 +278,9 @@ pub struct Config {
/// either absolute or relative to the current opened document or current working directory (if the buffer is not yet saved).
/// Defaults to true.
pub path_completion: bool,
/// Configures completion of words from open buffers.
/// Defaults to enabled with a trigger length of 7.
pub word_completion: WordCompletion,
/// Automatic formatting on save. Defaults to true.
pub auto_format: bool,
/// Default register used for yank/paste. Defaults to '"'
@ -776,13 +779,88 @@ pub enum WhitespaceRender {
},
}
impl WhitespaceRender {
    /// Returns whether any whitespace category (space, nbsp, nnbsp, tab or
    /// newline) is configured to render with the given `value`.
    pub fn any(&self, value: WhitespaceRenderValue) -> bool {
        self.space() == value
            || self.nbsp() == value
            || self.nnbsp() == value
            || self.tab() == value
            || self.newline() == value
    }
}
/// The rendering feature a [`WhitespacePalette`] is built for.
pub enum WhitespaceFeature {
    /// Rendering of whitespace anywhere in a line.
    Regular,
    /// Rendering of trailing whitespace only.
    Trailing,
}

impl WhitespaceFeature {
    /// Whether this feature is active under the given render setting:
    /// `All` enables both features, `Trailing` enables only trailing rendering.
    pub fn is_enabled(&self, render: WhitespaceRenderValue) -> bool {
        match self {
            WhitespaceFeature::Regular => matches!(render, WhitespaceRenderValue::All),
            WhitespaceFeature::Trailing => matches!(
                render,
                WhitespaceRenderValue::All | WhitespaceRenderValue::Trailing
            ),
        }
    }

    /// Builds the palette of visualization strings for this feature.
    pub fn palette(self, cfg: &WhitespaceConfig, tab_width: usize) -> WhitespacePalette {
        WhitespacePalette::from(self, cfg, tab_width)
    }
}
/// Per-category strings used to visualize whitespace while rendering.
///
/// Categories whose rendering is disabled for the requested feature fall back
/// to plain blanks of the same width.
#[derive(Debug)]
pub struct WhitespacePalette {
    pub space: String,
    pub nbsp: String,
    pub nnbsp: String,
    pub tab: String,
    pub virtual_tab: String,
    pub newline: String,
}

impl WhitespacePalette {
    fn from(feature: WhitespaceFeature, cfg: &WhitespaceConfig, tab_width: usize) -> Self {
        // Picks the configured glyph when the category is rendered for this
        // feature, otherwise an invisible single space.
        let pick = |enabled: bool, glyph: String| -> String {
            if enabled {
                glyph
            } else {
                " ".to_string()
            }
        };
        let render = &cfg.render;
        let chars = &cfg.characters;
        Self {
            space: pick(feature.is_enabled(render.space()), chars.space.to_string()),
            nbsp: pick(feature.is_enabled(render.nbsp()), chars.nbsp.to_string()),
            nnbsp: pick(feature.is_enabled(render.nnbsp()), chars.nnbsp.to_string()),
            // Tabs are `tab_width` columns wide, rendered or not.
            tab: if feature.is_enabled(render.tab()) {
                chars.generate_tab(tab_width)
            } else {
                " ".repeat(tab_width)
            },
            newline: pick(
                feature.is_enabled(render.newline()),
                chars.newline.to_string(),
            ),
            virtual_tab: " ".repeat(tab_width),
        }
    }
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub enum WhitespaceRenderValue {
None,
All,
Trailing,
// TODO
// Selection,
All,
}
impl WhitespaceRender {
@ -907,6 +985,14 @@ impl Default for WhitespaceCharacters {
}
}
impl WhitespaceCharacters {
    /// Renders a tab of the given `width` (in columns) as a string: the tab
    /// glyph followed by `width - 1` padding glyphs.
    ///
    /// Uses `saturating_sub` so a `width` of zero yields just the tab glyph
    /// instead of underflowing (`width - 1` would panic in debug builds and
    /// wrap to a huge count in release builds).
    pub fn generate_tab(&self, width: usize) -> String {
        std::iter::once(self.tab)
            .chain(std::iter::repeat(self.tabpad).take(width.saturating_sub(1)))
            .collect()
    }
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case")]
pub struct IndentGuidesConfig {
@ -974,6 +1060,22 @@ pub enum PopupBorderConfig {
Menu,
}
/// Configuration for completion of words from open buffers.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(default, rename_all = "kebab-case", deny_unknown_fields)]
pub struct WordCompletion {
    /// Whether word completion is enabled.
    pub enable: bool,
    /// Trigger length for word completion; non-zero by construction.
    // NOTE(review): presumably the number of typed characters required before
    // suggestions appear — confirm against the completion handler.
    pub trigger_length: NonZeroU8,
}

impl Default for WordCompletion {
    /// Enabled by default with a trigger length of 7 (see the `Config` docs).
    fn default() -> Self {
        Self {
            enable: true,
            trigger_length: NonZeroU8::new(7).unwrap(),
        }
    }
}
impl Default for Config {
fn default() -> Self {
Self {
@ -993,6 +1095,7 @@ impl Default for Config {
auto_pairs: AutoPairConfig::default(),
auto_completion: true,
path_completion: true,
word_completion: WordCompletion::default(),
auto_format: true,
default_yank_register: '"',
auto_save: AutoSave::default(),
@ -2339,3 +2442,95 @@ impl CursorCache {
self.0.set(None)
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_whitespace_render_any() {
        // `Trailing` matches only itself, not `None` or `All`.
        let sut = WhitespaceRender::Basic(WhitespaceRenderValue::Trailing);
        assert!(!sut.any(WhitespaceRenderValue::None));
        assert!(!sut.any(WhitespaceRenderValue::All));
        assert!(sut.any(WhitespaceRenderValue::Trailing));
    }

    #[test]
    fn test_whitespace_feature_is_enabled_regular() {
        // Regular rendering is only active when everything is rendered.
        let sut = WhitespaceFeature::Regular;
        assert!(!sut.is_enabled(WhitespaceRenderValue::None));
        assert!(!sut.is_enabled(WhitespaceRenderValue::Trailing));
        assert!(sut.is_enabled(WhitespaceRenderValue::All));
    }

    #[test]
    fn test_whitespace_feature_is_enabled_trailing() {
        // Trailing rendering is active for both `Trailing` and `All`.
        let sut = WhitespaceFeature::Trailing;
        assert!(!sut.is_enabled(WhitespaceRenderValue::None));
        assert!(sut.is_enabled(WhitespaceRenderValue::Trailing));
        assert!(sut.is_enabled(WhitespaceRenderValue::All));
    }

    // NOTE(review): several expected string literals below appear blank; they
    // presumably contain the default non-ASCII whitespace glyphs — confirm
    // against `WhitespaceCharacters::default`.
    #[test]
    fn test_whitespace_palette_regular_all() {
        let cfg = WhitespaceConfig {
            render: WhitespaceRender::Basic(WhitespaceRenderValue::All),
            ..Default::default()
        };
        let sut = WhitespacePalette::from(WhitespaceFeature::Regular, &cfg, 2);
        assert_eq!("·", sut.space);
        assert_eq!("", sut.nbsp);
        assert_eq!("", sut.nnbsp);
        assert_eq!("", sut.tab);
        assert_eq!(" ", sut.virtual_tab);
        assert_eq!("", sut.newline);
    }

    #[test]
    fn test_whitespace_palette_regular_trailing() {
        // Regular palette under `Trailing` render: everything falls back to blanks.
        let cfg = WhitespaceConfig {
            render: WhitespaceRender::Basic(WhitespaceRenderValue::Trailing),
            ..Default::default()
        };
        let sut = WhitespacePalette::from(WhitespaceFeature::Regular, &cfg, 2);
        assert_eq!(" ", sut.space);
        assert_eq!(" ", sut.nbsp);
        assert_eq!(" ", sut.nnbsp);
        assert_eq!(" ", sut.tab);
        assert_eq!(" ", sut.virtual_tab);
        assert_eq!(" ", sut.newline);
    }

    #[test]
    fn test_whitespace_palette_trailing_all() {
        let cfg = WhitespaceConfig {
            render: WhitespaceRender::Basic(WhitespaceRenderValue::All),
            ..Default::default()
        };
        let sut = WhitespacePalette::from(WhitespaceFeature::Trailing, &cfg, 2);
        assert_eq!("·", sut.space);
        assert_eq!("", sut.nbsp);
        assert_eq!("", sut.nnbsp);
        assert_eq!("", sut.tab);
        assert_eq!(" ", sut.virtual_tab);
        assert_eq!("", sut.newline);
    }

    #[test]
    fn test_whitespace_characters_render_tab() {
        let sut = WhitespaceCharacters::default();
        assert_eq!("", sut.generate_tab(1));
        assert_eq!("", sut.generate_tab(2));
        assert_eq!("", sut.generate_tab(3));
        assert_eq!("", sut.generate_tab(4));
    }
}

View File

@ -9,6 +9,7 @@ pub mod completion;
pub mod dap;
pub mod diagnostics;
pub mod lsp;
pub mod word_index;
#[derive(Debug)]
pub enum AutoSaveEvent {
@ -22,6 +23,7 @@ pub struct Handlers {
pub signature_hints: Sender<lsp::SignatureHelpEvent>,
pub auto_save: Sender<AutoSaveEvent>,
pub document_colors: Sender<lsp::DocumentColorsEvent>,
pub word_index: word_index::Handler,
}
impl Handlers {
@ -46,8 +48,13 @@ impl Handlers {
};
send_blocking(&self.signature_hints, event)
}
pub fn word_index(&self) -> &word_index::WordIndex {
&self.word_index.index
}
}
pub fn register_hooks(handlers: &Handlers) {
lsp::register_hooks(handlers);
word_index::register_hooks(handlers);
}

View File

@ -0,0 +1,509 @@
//! Indexing of words from open buffers.
//!
//! This provides an eventually consistent set of words used in any open buffers. This set is
//! later used for lexical completion.
use std::{borrow::Cow, collections::HashMap, iter, mem, sync::Arc, time::Duration};
use helix_core::{
chars::char_is_word, fuzzy::fuzzy_match, movement, ChangeSet, Range, Rope, RopeSlice,
};
use helix_event::{register_hook, AsyncHook};
use helix_stdx::rope::RopeSliceExt as _;
use parking_lot::RwLock;
use tokio::{sync::mpsc, time::Instant};
use crate::{
events::{ConfigDidChange, DocumentDidChange, DocumentDidClose, DocumentDidOpen},
DocumentId,
};
use super::Handlers;
/// A pending edit to a document: the text before and after the edit, plus the
/// changeset transforming one into the other.
#[derive(Debug)]
struct Change {
    /// The document text before `changes` was applied.
    old_text: Rope,
    /// The document text after `changes` was applied.
    text: Rope,
    /// The changeset mapping `old_text` to `text`.
    changes: ChangeSet,
}

/// Messages driving the word index.
#[derive(Debug)]
enum Event {
    /// Index every word of a newly opened document.
    Insert(Rope),
    /// Re-index the regions of a document affected by an edit.
    Update(DocumentId, Change),
    /// Remove a closed document's words from the index.
    Delete(DocumentId, Rope),
    /// Clear the entire word index.
    /// This is used to clear memory when the feature is turned off.
    Clear,
}
/// Owns the shared [`WordIndex`] and the channels that feed it.
#[derive(Debug)]
pub struct Handler {
    pub(super) index: WordIndex,
    /// A sender into an async hook which debounces updates to the index.
    hook: mpsc::Sender<Event>,
    /// A sender to a tokio task which coordinates the indexing of documents.
    ///
    /// See [WordIndex::run]. A supervisor-like task is in charge of spawning tasks to update the
    /// index. This ensures that consecutive edits to a document trigger the correct order of
    /// insertions and deletions into the word set.
    coordinator: mpsc::UnboundedSender<Event>,
}

impl Handler {
    /// Creates the handler, spawning the coordinator task and the debouncing
    /// hook that both feed the shared index.
    pub fn spawn() -> Self {
        let index = WordIndex::default();
        let (tx, rx) = mpsc::unbounded_channel();
        // The coordinator task owns the receiving end and applies events in order.
        tokio::spawn(index.clone().run(rx));
        Self {
            hook: Hook {
                changes: HashMap::default(),
                coordinator: tx.clone(),
            }
            .spawn(),
            index,
            coordinator: tx,
        }
    }
}
/// Async hook which debounces document updates before handing them to the
/// indexing coordinator.
#[derive(Debug)]
struct Hook {
    /// Changes seen but not yet forwarded to the indexer, keyed by document.
    changes: HashMap<DocumentId, Change>,
    /// Sender into the coordinator task that serializes index updates.
    coordinator: mpsc::UnboundedSender<Event>,
}

const DEBOUNCE: Duration = Duration::from_secs(1);

impl AsyncHook for Hook {
    type Event = Event;

    fn handle_event(&mut self, event: Self::Event, timeout: Option<Instant>) -> Option<Instant> {
        match event {
            Event::Insert(_) => unreachable!("inserts are sent to the worker directly"),
            Event::Update(doc, change) => {
                if let Some(pending_change) = self.changes.get_mut(&doc) {
                    // If there is already a change waiting for this document, merge the two
                    // changes together by composing the changesets and saving the new `text`.
                    pending_change.changes =
                        mem::take(&mut pending_change.changes).compose(change.changes);
                    pending_change.text = change.text;
                    Some(Instant::now() + DEBOUNCE)
                } else if is_changeset_significant(&change.changes) {
                    // If the changeset is fairly large, debounce before updating the index.
                    // Fix: this condition was previously negated, which debounced *small*
                    // edits and applied large ones immediately — the opposite of the
                    // documented intent in both branches.
                    self.changes.insert(doc, change);
                    Some(Instant::now() + DEBOUNCE)
                } else {
                    // Otherwise if the change is small, queue the update to the index immediately.
                    self.coordinator.send(Event::Update(doc, change)).unwrap();
                    timeout
                }
            }
            Event::Delete(doc, text) => {
                // If there are pending changes that haven't been indexed since the last debounce,
                // forget them and delete the old text.
                if let Some(change) = self.changes.remove(&doc) {
                    self.coordinator
                        .send(Event::Delete(doc, change.old_text))
                        .unwrap();
                } else {
                    self.coordinator.send(Event::Delete(doc, text)).unwrap();
                }
                timeout
            }
            Event::Clear => unreachable!("clear is sent to the worker directly"),
        }
    }

    fn finish_debounce(&mut self) {
        // Flush every pending document change to the coordinator for indexing.
        for (doc, change) in self.changes.drain() {
            self.coordinator.send(Event::Update(doc, change)).unwrap();
        }
    }
}
/// Minimum number of grapheme clusters required to include a word in the index
const MIN_WORD_GRAPHEMES: usize = 3;
/// Maximum word length allowed (in chars)
const MAX_WORD_LEN: usize = 50;

/// Key type of the index: an owned/interned string (`kstring::KString`).
type Word = kstring::KString;
#[derive(Debug, Default)]
struct WordIndexInner {
    /// Reference counted storage for words.
    ///
    /// Words are very likely to be reused many times. Instead of storing duplicates we keep a
    /// reference count of times a word is used. When the reference count drops to zero the word
    /// is removed from the index.
    words: HashMap<Word, u32>,
}

impl WordIndexInner {
    /// Iterates over every distinct word currently in the index.
    fn words(&self) -> impl Iterator<Item = &Word> {
        self.words.keys()
    }

    /// Adds one reference to `word`, inserting it if it is not yet known.
    fn insert(&mut self, word: RopeSlice) {
        let word: Cow<str> = word.into();
        if let Some(rc) = self.words.get_mut(word.as_ref()) {
            // Saturate instead of overflowing for pathologically common words.
            *rc = rc.saturating_add(1);
        } else {
            let word = match word {
                Cow::Owned(s) => Word::from_string(s),
                Cow::Borrowed(s) => Word::from_ref(s),
            };
            self.words.insert(word, 1);
        }
    }

    /// Drops one reference to `word`, removing it once no references remain.
    /// Words not present in the index are ignored.
    fn remove(&mut self, word: RopeSlice) {
        let word: Cow<str> = word.into();
        match self.words.get_mut(word.as_ref()) {
            Some(1) => {
                self.words.remove(word.as_ref());
            }
            Some(n) => *n -= 1,
            None => (),
        }
    }

    /// Empties the index. Uses `mem::take` (rather than `HashMap::clear`, which
    /// keeps capacity) so the allocation is released when the feature is off.
    fn clear(&mut self) {
        std::mem::take(&mut self.words);
    }
}
/// Cheaply clonable handle to the shared word index.
#[derive(Debug, Default, Clone)]
pub struct WordIndex {
    inner: Arc<RwLock<WordIndexInner>>,
}

impl WordIndex {
    /// Returns indexed words fuzzy-matching `pattern`.
    // NOTE(review): results are sorted ascending by fuzzy score — confirm this
    // matches the ordering downstream completion code expects.
    pub fn matches(&self, pattern: &str) -> Vec<String> {
        let inner = self.inner.read();
        let mut matches = fuzzy_match(pattern, inner.words(), false);
        matches.sort_unstable_by_key(|(_, score)| *score);
        matches
            .into_iter()
            .map(|(word, _)| word.to_string())
            .collect()
    }

    /// Adds one reference for every word of `text` (used on document open).
    fn add_document(&self, text: &Rope) {
        let mut inner = self.inner.write();
        for word in words(text.slice(..)) {
            inner.insert(word);
        }
    }

    /// Applies a document edit: words in the changed windows of the new text
    /// are inserted, words in the corresponding old windows are removed.
    fn update_document(&self, old_text: &Rope, text: &Rope, changes: &ChangeSet) {
        let mut inner = self.inner.write();
        for (old_window, new_window) in changed_windows(old_text.slice(..), text.slice(..), changes)
        {
            for word in words(new_window) {
                inner.insert(word);
            }
            for word in words(old_window) {
                inner.remove(word);
            }
        }
    }

    /// Drops one reference for every word of `text` (used on document close).
    fn remove_document(&self, text: &Rope) {
        let mut inner = self.inner.write();
        for word in words(text.slice(..)) {
            inner.remove(word);
        }
    }

    /// Empties the index and releases its memory.
    fn clear(&self) {
        let mut inner = self.inner.write();
        inner.clear();
    }

    /// Coordinate the indexing of documents.
    ///
    /// This task wraps a MPSC queue and spawns blocking tasks which update the index. Updates
    /// are applied one-by-one to ensure that changes to the index are **serialized**:
    /// updates to each document must be applied in-order.
    async fn run(self, mut events: mpsc::UnboundedReceiver<Event>) {
        while let Some(event) = events.recv().await {
            let this = self.clone();
            // Indexing takes the write lock and may be CPU heavy, so it runs on
            // the blocking pool; awaiting each task serializes the updates.
            tokio::task::spawn_blocking(move || match event {
                Event::Insert(text) => {
                    this.add_document(&text);
                }
                Event::Update(
                    _doc,
                    Change {
                        old_text,
                        text,
                        changes,
                        ..
                    },
                ) => {
                    this.update_document(&old_text, &text, &changes);
                }
                Event::Delete(_doc, text) => {
                    this.remove_document(&text);
                }
                Event::Clear => {
                    this.clear();
                }
            })
            .await
            .unwrap();
        }
    }
}
/// Iterates over the words of `text`.
///
/// A candidate is kept only if it ends in at least [`MIN_WORD_GRAPHEMES`]
/// word-character graphemes and is at most [`MAX_WORD_LEN`] chars long.
fn words(text: RopeSlice) -> impl Iterator<Item = RopeSlice> {
    let mut cursor = Range::point(0);
    // If the text starts mid-word-like-run, advance to that run's end so the
    // loop below begins on a word boundary.
    if text
        .get_char(cursor.anchor)
        .is_some_and(|ch| !ch.is_whitespace())
    {
        let cursor_word_end = movement::move_next_word_end(text, cursor, 1);
        if cursor_word_end.anchor == 0 {
            cursor = cursor_word_end;
        }
    }

    iter::from_fn(move || {
        while cursor.head <= text.len_chars() {
            let mut word = None;
            // Keep the span only if its last MIN_WORD_GRAPHEMES graphemes are
            // all word characters.
            if text
                .slice(..cursor.head)
                .graphemes_rev()
                .take(MIN_WORD_GRAPHEMES)
                .take_while(|g| g.chars().all(char_is_word))
                .count()
                == MIN_WORD_GRAPHEMES
            {
                // Trim leading non-word characters off the candidate.
                cursor.anchor += text
                    .chars_at(cursor.anchor)
                    .take_while(|&c| !char_is_word(c))
                    .count();
                let slice = cursor.slice(text);
                if slice.len_chars() <= MAX_WORD_LEN {
                    word = Some(slice);
                }
            }
            let head = cursor.head;
            cursor = movement::move_next_word_end(text, cursor, 1);
            if cursor.head == head {
                // No forward progress: end of text reached; force loop exit.
                cursor.head = usize::MAX;
            }
            if word.is_some() {
                return word;
            }
        }
        None
    })
}
/// Finds areas of the old and new texts around each operation in `changes`.
///
/// The window is larger than the changed area and can encompass multiple insert/delete operations
/// if they are grouped closely together.
///
/// The ranges of the old and new text should usually be of different sizes. For example a
/// deletion of "foo" surrounded by large retain sections would give a longer window into the
/// `old_text` and shorter window of `new_text`. Vice-versa for an insertion. A full replacement
/// of a word though would give two slices of the same size.
fn changed_windows<'a>(
    old_text: RopeSlice<'a>,
    new_text: RopeSlice<'a>,
    changes: &'a ChangeSet,
) -> impl Iterator<Item = (RopeSlice<'a>, RopeSlice<'a>)> {
    use helix_core::Operation::*;
    let mut operations = changes.changes().iter().peekable();
    // Char positions reached so far in the old and new texts respectively.
    let mut old_pos = 0;
    let mut new_pos = 0;
    iter::from_fn(move || loop {
        let operation = operations.next()?;
        let old_start = old_pos;
        let new_start = new_pos;
        let len = operation.len_chars();
        match operation {
            Retain(_) => {
                // Retains change nothing; they only advance both positions.
                old_pos += len;
                new_pos += len;
                continue;
            }
            Insert(_) => new_pos += len,
            Delete(_) => old_pos += len,
        }
        // Scan ahead until a `Retain` is found which would end a window.
        // Short retains (<= MAX_WORD_LEN) are folded into the current window
        // since they cannot separate two complete words.
        while let Some(o) = operations.next_if(|op| !matches!(op, Retain(n) if *n > MAX_WORD_LEN)) {
            let len = o.len_chars();
            match o {
                Retain(_) => {
                    old_pos += len;
                    new_pos += len;
                }
                Delete(_) => old_pos += len,
                Insert(_) => new_pos += len,
            }
        }
        // Pad the window by MAX_WORD_LEN on each side so words straddling the
        // edit boundary are fully contained, clamped to the text bounds.
        let old_window = old_start.saturating_sub(MAX_WORD_LEN)
            ..(old_pos + MAX_WORD_LEN).min(old_text.len_chars());
        let new_window = new_start.saturating_sub(MAX_WORD_LEN)
            ..(new_pos + MAX_WORD_LEN).min(new_text.len_chars());
        return Some((old_text.slice(old_window), new_text.slice(new_window)));
    })
}
/// Estimates whether a changeset is significant or small.
///
/// "Significant" means the total number of inserted and deleted characters
/// crosses a fixed threshold; retains are ignored.
fn is_changeset_significant(changes: &ChangeSet) -> bool {
    use helix_core::Operation::*;

    let diff: usize = changes
        .changes()
        .iter()
        .filter(|operation| matches!(operation, Delete(_) | Insert(_)))
        .map(|operation| operation.len_chars())
        .sum();

    // This is arbitrary and could be tuned further:
    diff > 1_000
}
/// Registers editor event hooks that keep the word index in sync with open buffers.
pub(crate) fn register_hooks(handlers: &Handlers) {
    let coordinator = handlers.word_index.coordinator.clone();
    register_hook!(move |event: &mut DocumentDidOpen<'_>| {
        // Freshly opened documents are indexed immediately (no debouncing).
        let doc = doc!(event.editor, &event.doc);
        if doc.word_completion_enabled() {
            coordinator.send(Event::Insert(doc.text().clone())).unwrap();
        }
        Ok(())
    });

    let tx = handlers.word_index.hook.clone();
    register_hook!(move |event: &mut DocumentDidChange<'_>| {
        // Edits flow through the debouncing hook. Ghost transactions are
        // skipped — presumably transient edits not worth indexing; confirm
        // against the `DocumentDidChange` event docs.
        if !event.ghost_transaction && event.doc.word_completion_enabled() {
            helix_event::send_blocking(
                &tx,
                Event::Update(
                    event.doc.id(),
                    Change {
                        old_text: event.old_text.clone(),
                        text: event.doc.text().clone(),
                        changes: event.changes.clone(),
                    },
                ),
            );
        }
        Ok(())
    });

    let tx = handlers.word_index.hook.clone();
    register_hook!(move |event: &mut DocumentDidClose<'_>| {
        // Closing a document drops its words. Routed through the hook so any
        // pending (debounced) change for the document is reconciled first.
        if event.doc.word_completion_enabled() {
            helix_event::send_blocking(
                &tx,
                Event::Delete(event.doc.id(), event.doc.text().clone()),
            );
        }
        Ok(())
    });

    let coordinator = handlers.word_index.coordinator.clone();
    register_hook!(move |event: &mut ConfigDidChange<'_>| {
        // The feature has been turned off. Clear the index and reclaim any used memory.
        if event.old.word_completion.enable && !event.new.word_completion.enable {
            coordinator.send(Event::Clear).unwrap();
        }

        // The feature has been turned on. Index open documents.
        if !event.old.word_completion.enable && event.new.word_completion.enable {
            for doc in event.editor.documents() {
                if doc.word_completion_enabled() {
                    coordinator.send(Event::Insert(doc.text().clone())).unwrap();
                }
            }
        }
        Ok(())
    });
}
#[cfg(test)]
mod tests {
    use std::collections::HashSet;

    use super::*;
    use helix_core::diff::compare_ropes;

    impl WordIndex {
        /// Test helper: snapshot of all distinct words currently indexed.
        fn words(&self) -> HashSet<String> {
            let inner = self.inner.read();
            inner.words().map(|w| w.to_string()).collect()
        }
    }

    /// Asserts that indexing `text` from scratch yields exactly `expected`.
    #[track_caller]
    fn assert_words<I: ToString, T: IntoIterator<Item = I>>(text: &str, expected: T) {
        let text = Rope::from_str(text);
        let index = WordIndex::default();
        index.add_document(&text);
        let actual = index.words();
        let expected: HashSet<_> = expected.into_iter().map(|i| i.to_string()).collect();
        assert_eq!(expected, actual);
    }

    #[test]
    fn parse() {
        assert_words("one two three", ["one", "two", "three"]);
        // Words shorter than MIN_WORD_GRAPHEMES are not indexed.
        assert_words("a foo c", ["foo"]);
    }

    /// Diffs `before` against `after`, applies the update to an index built
    /// from `before`, and asserts exactly which words were removed/inserted.
    #[track_caller]
    fn assert_diff<S, R, I>(before: &str, after: &str, expect_removed: R, expect_inserted: I)
    where
        S: ToString,
        R: IntoIterator<Item = S>,
        I: IntoIterator<Item = S>,
    {
        let before = Rope::from_str(before);
        let after = Rope::from_str(after);
        let diff = compare_ropes(&before, &after);
        let expect_removed: HashSet<_> =
            expect_removed.into_iter().map(|i| i.to_string()).collect();
        let expect_inserted: HashSet<_> =
            expect_inserted.into_iter().map(|i| i.to_string()).collect();

        let index = WordIndex::default();
        index.add_document(&before);
        let words_before = index.words();
        index.update_document(&before, &after, diff.changes());
        let words_after = index.words();

        let actual_removed = words_before.difference(&words_after).cloned().collect();
        let actual_inserted = words_after.difference(&words_before).cloned().collect();

        eprintln!("\"{before}\" {words_before:?} => \"{after}\" {words_after:?}");
        assert_eq!(
            expect_removed, actual_removed,
            "expected {expect_removed:?} to be removed, instead {actual_removed:?} was"
        );
        assert_eq!(
            expect_inserted, actual_inserted,
            "expected {expect_inserted:?} to be inserted, instead {actual_inserted:?} was"
        );
    }

    #[test]
    fn diff() {
        assert_diff("one two three", "one five three", ["two"], ["five"]);
        // Replacements yielding too-short or non-word results only remove.
        assert_diff("one two three", "one to three", ["two"], []);
        assert_diff("one two three", "one three", ["two"], []);
        assert_diff("one two three", "one t{o three", ["two"], []);
        assert_diff("one foo three", "one fooo three", ["foo"], ["fooo"]);
    }
}

View File

@ -65,6 +65,7 @@ julia = { command = "julia", timeout = 60, args = [ "--startup-file=no", "--hist
just-lsp = { command = "just-lsp" }
koka = { command = "koka", args = ["--language-server", "--lsstdio"] }
koto-ls = { command = "koto-ls" }
kotlin-lsp = { command = "kotlin-lsp", args = ["--stdio"] }
kotlin-language-server = { command = "kotlin-language-server" }
lean = { command = "lean", args = [ "--server", "--memory=1024" ] }
ltex-ls = { command = "ltex-ls" }
@ -1783,6 +1784,7 @@ roots = [".marksman.toml"]
language-servers = [ "marksman", "markdown-oxide" ]
indent = { tab-width = 2, unit = " " }
block-comment-tokens = { start = "<!--", end = "-->" }
word-completion.trigger-length = 4
[[grammar]]
name = "markdown"
@ -2046,7 +2048,10 @@ roots = ["rebar.config"]
shebangs = ["escript"]
comment-token = "%%"
indent = { tab-width = 4, unit = " " }
language-servers = [ "erlang-ls", "elp" ]
language-servers = [
{ name = "erlang-ls", except-features = ["document-symbols", "workspace-symbols"] },
{ name = "elp", except-features = ["document-symbols", "workspace-symbols"] }
]
[[grammar]]
name = "erlang"
@ -3064,7 +3069,7 @@ formatter = { command = "inko", args = ["fmt", "-"] }
[[grammar]]
name = "inko"
source = { git = "https://github.com/inko-lang/tree-sitter-inko", rev = "7860637ce1b43f5f79cfb7cc3311bf3234e9479f" }
source = { git = "https://github.com/inko-lang/tree-sitter-inko", rev = "f58a87ac4dc6a7955c64c9e4408fbd693e804686" }
[[language]]
name = "bicep"
@ -3418,7 +3423,7 @@ language-servers = ["just-lsp"]
[[grammar]]
name = "just"
source = { git = "https://github.com/poliorcetics/tree-sitter-just", rev = "8d03cfdd7ab89ff76d935827de1b93450fa0ec0a" }
source = { git = "https://github.com/poliorcetics/tree-sitter-just", rev = "0f84211c637813bcf1eb32c9e35847cdaea8760d" }
[[language]]
name = "gn"

View File

@ -19,5 +19,3 @@
(invocation_expression function: (member_access_expression name: (identifier) @name)) @reference.send
(namespace_declaration name: (identifier) @name) @definition.module
(namespace_declaration name: (identifier) @name) @module

View File

@ -0,0 +1,9 @@
; Symbol (tags) queries for C.

; Function definitions/declarations (including struct-member function fields).
(function_declarator
  declarator: [(identifier) (field_identifier)] @definition.function)

; Function-like preprocessor macros.
(preproc_function_def name: (identifier) @definition.function)

; Typedefs.
(type_definition
  declarator: (type_identifier) @definition.type)

; Object-like preprocessor macros.
(preproc_def name: (identifier) @definition.constant)

View File

@ -0,0 +1,12 @@
; inherits: c

; Symbol (tags) queries for C++, on top of the inherited C queries.

; Member/namespaced function definitions (qualified names).
(function_declarator
  declarator: (qualified_identifier name: (identifier) @definition.function))

; Struct definitions (with a body, i.e. not forward declarations).
(struct_specifier
  name: (type_identifier) @definition.struct
  body: (field_declaration_list))

; Class definitions (with a body, i.e. not forward declarations).
(class_specifier
  name: (type_identifier) @definition.class
  body: (field_declaration_list))

View File

@ -0,0 +1,54 @@
; Symbol (tags) queries for Elixir.

; Definitions

; * modules and protocols
(call
  target: (identifier) @ignore
  (arguments (alias) @name)
  (#any-of? @ignore "defmodule" "defprotocol")) @definition.module

; * functions/macros
(call
  target: (identifier) @ignore
  (arguments
    [
      ; zero-arity functions with no parentheses
      (identifier) @name
      ; regular function clause
      (call target: (identifier) @name)
      ; function clause with a guard clause
      (binary_operator
        left: (call target: (identifier) @name)
        operator: "when")
    ])
  (#any-of? @ignore "def" "defp" "defdelegate" "defguard" "defguardp" "defmacro" "defmacrop" "defn" "defnp")) @definition.function

; References

; ignore calls to kernel/special-forms keywords
(call
  target: (identifier) @ignore
  (#any-of? @ignore "def" "defp" "defdelegate" "defguard" "defguardp" "defmacro" "defmacrop" "defn" "defnp" "defmodule" "defprotocol" "defimpl" "defstruct" "defexception" "defoverridable" "alias" "case" "cond" "else" "for" "if" "import" "quote" "raise" "receive" "require" "reraise" "super" "throw" "try" "unless" "unquote" "unquote_splicing" "use" "with"))

; ignore module attributes
(unary_operator
  operator: "@"
  operand: (call
    target: (identifier) @ignore))

; * function call
(call
  target: [
    ; local
    (identifier) @name
    ; remote
    (dot
      right: (identifier) @name)
  ]) @reference.call

; * pipe into function call
(binary_operator
  operator: "|>"
  right: (identifier) @name) @reference.call

; * modules
(alias) @name @reference.module

View File

@ -0,0 +1,45 @@
; Symbol (tags) queries for Erlang.

; Modules (-module(...) attribute)
(attribute
  name: (atom) @_attr
  (arguments (atom) @definition.module)
  (#eq? @_attr "module"))

; Constants (-define(...) attribute; parameterized defines are macros)
((attribute
   name: (atom) @_attr
   (arguments
     .
     [
       (atom) @definition.constant
       (call function: [(variable) (atom)] @definition.macro)
     ]))
 (#eq? @_attr "define"))

; Record definitions (-record(...) attribute)
((attribute
   name: (atom) @_attr
   (arguments
     .
     (atom) @definition.struct))
 (#eq? @_attr "record"))

; Function specs (-spec ... attribute)
((attribute
   name: (atom) @_attr
   (stab_clause name: (atom) @definition.interface))
 (#eq? @_attr "spec"))

; Types (-type/-opaque attributes)
((attribute
   name: (atom) @_attr
   (arguments
     (binary_operator
       left: [
         (atom) @definition.type
         (call function: (atom) @definition.type)
       ]
       operator: "::")))
 (#any-of? @_attr "type" "opaque"))

; Functions
(function_clause name: (atom) @definition.function)

View File

@ -2,4 +2,4 @@
(function_definition (name) @name) @definition.function
(call (name) @name) @reference.call
(call (identifier) @name) @reference.call

View File

@ -4,7 +4,7 @@
(function_declaration
name: (identifier) @name) @definition.function
(#strip! @doc "^//\\s*")
(#set-adjacent! @doc @definition.function)
(#select-adjacent! @doc @definition.function)
)
(
@ -13,7 +13,7 @@
(method_declaration
name: (field_identifier) @name) @definition.method
(#strip! @doc "^//\\s*")
(#set-adjacent! @doc @definition.method)
(#select-adjacent! @doc @definition.method)
)
(call_expression

View File

@ -78,7 +78,7 @@
] @keyword.operator
[
"class"
"type"
"trait"
] @keyword.storage.type

View File

@ -0,0 +1,14 @@
; Symbol (tags) queries for Inko.

(class
  name: _ @definition.struct)

(trait
  name: _ @definition.interface)

(external_function
  name: _ @definition.function)

(method
  name: _ @definition.function)

(define_constant
  name: _ @definition.constant)

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,28 @@
; Indentation queries for Janet.

; aligns forms to the second position if there's two in a line:
; (-> 10
;     (* 2)
;     (print))
(par_tup_lit . (sym_lit) @first . (_) @anchor
  (#set! "scope" "tail")
  (#same-line? @first @anchor)
  ; anything that doesn't match should be indented normally
  ; from https://github.com/janet-lang/spork/blob/5601dc883535473bca28351cc6df04ed6c656c65/spork/fmt.janet#L87C12-L93C38
  (#not-match? @first "^(fn|match|with|with-dyns|def|def-|var|var-|defn|defn-|varfn|defmacro|defmacro-|defer|edefer|loop|seq|tabseq|catseq|generate|coro|for|each|eachp|eachk|case|cond|do|defglobal|varglobal|if|when|when-let|when-with|while|with-syms|with-vars|if-let|if-not|if-with|let|short-fn|try|unless|default|forever|upscope|repeat|forv|compwhen|compif|ev/spawn|ev/do-thread|ev/spawn-thread|ev/with-deadline|label|prompt|forever)$")) @align

; everything else should be indented normally:
;
; (let [foo 10]
;   (print foo))
;
; (foo
;   bar)
(par_tup_lit . (sym_lit)) @indent

; for `{}` and `[]`:
; {:foo 10
;  :bar 20}
(struct_lit . (_) @anchor) @align

; [foo
;  bar]
(sqr_tup_lit . (_) @anchor) @align

View File

@ -0,0 +1,2 @@
; Inject the `comment` grammar into Janet comments (e.g. TODO/FIXME highlighting).
((comment) @injection.content
  (#set! injection.language "comment"))

View File

@ -61,6 +61,9 @@
(mod
name: (identifier) @namespace)
(module_path
name: (identifier) @namespace)
; Paths
(mod

View File

@ -30,6 +30,9 @@
(function_call
name: (identifier) @local.reference)
(module_path
name: (identifier) @local.reference)
(recipe_dependency
name: (identifier) @local.reference)

View File

@ -0,0 +1,2 @@
; TODO: have symbol types for markup?
; Treat ATX (`#`-style) headings as symbols so pickers can jump to sections.
(atx_heading) @definition.class

View File

@ -0,0 +1,26 @@
; Symbol (tags) queries for Rust.

(struct_item
  name: (type_identifier) @definition.struct)

(const_item
  name: (identifier) @definition.constant)

(trait_item
  name: (type_identifier) @definition.interface)

(function_item
  name: (identifier) @definition.function)

; Function signatures without bodies (e.g. in traits or extern blocks).
(function_signature_item
  name: (identifier) @definition.function)

(enum_item
  name: (type_identifier) @definition.type)

; Enum variants are tagged as structs for lack of a closer symbol kind.
(enum_variant
  name: (identifier) @definition.struct)

(mod_item
  name: (identifier) @definition.module)

(macro_definition
  name: (identifier) @definition.macro)

View File

@ -0,0 +1,6 @@
; Symbol (tags) queries for Typst.

; should be a heading
(heading (text) @definition.class)

; should be a label/reference/tag
(heading (label) @definition.function)
(content (label) @definition.function)

View File

@ -3,126 +3,4 @@
# Ported by: @satoqz
# License: MIT
"attribute" = "green"
"comment" = { fg = "grey1", modifiers = ["italic"] }
"constant" = "fg0"
"constant.builtin" = "purple"
"constant.character.escape" = "green"
"constant.numeric" = "purple"
"constructor" = "green"
"function" = "green"
"keyword" = "red"
"keyword.directive" = "purple"
"keyword.operator" = "orange"
"label" = "red"
"namespace" = "yellow"
"operator" = "orange"
"punctuation" = "grey1"
"punctuation.bracket" = "fg0"
"punctuation.delimiter" = "grey1"
"punctuation.special" = "blue"
"special" = "green"
"string" = "aqua"
"string.regexp" = "green"
"string.special.path" = "yellow"
"string.special.symbol" = "fg0"
"string.special.url" = { fg = "fg0", modifiers = ["underlined"] }
"tag" = "orange"
"type" = "yellow"
"type.enum.variant" = "purple"
"variable" = "fg0"
"variable.builtin" = "purple"
"variable.other.member" = "blue"
"variable.parameter" = "fg0"
"markup.heading.1" = "red"
"markup.heading.2" = "orange"
"markup.heading.3" = "yellow"
"markup.heading.4" = "green"
"markup.heading.5" = "blue"
"markup.heading.6" = "purple"
"markup.bold" = { fg = "fg0", modifiers = ["bold"] }
"markup.italic" = { fg = "fg0", modifiers = ["italic"] }
"markup.strikethrough" = { fg = "fg0", modifiers = ["crossed_out"] }
"markup.link.label" = "blue"
"markup.link.text" = "yellow"
"markup.link.url" = { fg = "blue", modifiers = ["underlined"] }
"markup.list" = "blue"
"markup.list.checked" = "green"
"markup.list.unchecked" = "grey1"
"markup.quote" = "grey1"
"markup.raw" = "green"
"diff.delta" = "blue"
"diff.minus" = "red"
"diff.plus" = "green"
"diagnostic.error" = { underline = { color = "red", style = "curl" } }
"diagnostic.hint" = { underline = { color = "green", style = "curl" } }
"diagnostic.info" = { underline = { color = "blue", style = "curl" } }
"diagnostic.unnecessary" = { modifiers = ["dim"] }
"diagnostic.warning" = { underline = { color = "yellow", style = "curl" } }
error = "red"
hint = "green"
info = "blue"
warning = "yellow"
"ui.background" = { fg = "fg0", bg = "bg0" }
"ui.bufferline" = { fg = "fg1", bg = "bg4" }
"ui.bufferline.active" = { fg = "bg0", bg = "grey2" }
"ui.bufferline.background" = { bg = "bg1" }
"ui.cursor" = { fg = "bg0", bg = "grey1" }
"ui.cursor.primary" = { fg = "bg0", bg = "fg0" }
"ui.cursor.match" = { bg = "bg2" }
"ui.cursorline.primary" = { bg = "bg1" }
"ui.help" = { fg = "grey1", bg = "bg0" }
"ui.highlight" = { bg = "bg2" }
"ui.linenr" = "bg3"
"ui.linenr.selected" = "grey1"
"ui.menu" = { fg = "fg1", bg = "bg2" }
"ui.menu.scroll" = { fg = "grey0", bg = "bg1" }
"ui.menu.selected" = { fg = "bg2", bg = "grey2" }
"ui.popup" = { fg = "fg1", bg = "bg2" }
"ui.popup.info" = { "fg" = "grey1", bg = "bg0" }
"ui.selection" = { bg = "bg2" }
"ui.statusline" = { fg = "fg1", bg = "bg1" }
"ui.statusline.inactive" = { fg = "grey1", bg = "bg1" }
"ui.statusline.insert" = { fg = "bg0", bg = "green" }
"ui.statusline.normal" = { fg = "bg0", bg = "grey2" }
"ui.statusline.select" = { fg = "bg0", bg = "red" }
"ui.text" = "fg0"
"ui.text.directory" = { fg = "blue" }
"ui.text.focus" = { bg = "bg2" }
"ui.text.inactive" = { fg = "grey1" }
"ui.text.info" = "grey1"
"ui.virtual" = "grey0"
"ui.virtual.indent-guide" = "bg3"
"ui.virtual.inlay-hint" = "grey0"
"ui.virtual.jump-label" = "grey2"
"ui.virtual.ruler" = { bg = "bg1" }
"ui.window" = { fg = "bg3" }
[palette]
fg0 = "#d4be98"
fg1 = "#ddc7a1"
bg0 = "#282828"
bg1 = "#32302f"
bg2 = "#45403d"
bg3 = "#5a524c"
bg4 = "#504945"
grey0 = "#7c6f64"
grey1 = "#928374"
grey2 = "#a89984"
aqua = "#89b482"
blue = "#7daea3"
green = "#a9b665"
orange = "#e78a4e"
purple = "#d3869b"
red = "#ea6962"
yellow = "#d8a657"
inherits = "gruvbox_material_dark_medium"

View File

@ -0,0 +1,13 @@
# Gruvbox Material Dark Hard for Helix
# Original Author: @sainnhe (https://github.com/sainnhe/gruvbox-material)
# Base theme ported by: @satoqz
# Palette ported by: @ivan-shymkiv
# License: MIT
inherits = "gruvbox_material_dark_medium"
[palette]
bg0 = "#1d2021"
bg1 = "#282828"
bg2 = "#3c3836"
bg3 = "#504945"

View File

@ -0,0 +1,128 @@
# Gruvbox Material Dark Medium for Helix
# Original Author: @sainnhe (https://github.com/sainnhe/gruvbox-material)
# Ported by: @satoqz
# License: MIT
"attribute" = "green"
"comment" = { fg = "grey1", modifiers = ["italic"] }
"constant" = "fg0"
"constant.builtin" = "purple"
"constant.character.escape" = "green"
"constant.numeric" = "purple"
"constructor" = "green"
"function" = "green"
"keyword" = "red"
"keyword.directive" = "purple"
"keyword.operator" = "orange"
"label" = "red"
"namespace" = "yellow"
"operator" = "orange"
"punctuation" = "grey1"
"punctuation.bracket" = "fg0"
"punctuation.delimiter" = "grey1"
"punctuation.special" = "blue"
"special" = "green"
"string" = "aqua"
"string.regexp" = "green"
"string.special.path" = "yellow"
"string.special.symbol" = "fg0"
"string.special.url" = { fg = "fg0", modifiers = ["underlined"] }
"tag" = "orange"
"type" = "yellow"
"type.enum.variant" = "purple"
"variable" = "fg0"
"variable.builtin" = "purple"
"variable.other.member" = "blue"
"variable.parameter" = "fg0"
"markup.heading.1" = "red"
"markup.heading.2" = "orange"
"markup.heading.3" = "yellow"
"markup.heading.4" = "green"
"markup.heading.5" = "blue"
"markup.heading.6" = "purple"
"markup.bold" = { fg = "fg0", modifiers = ["bold"] }
"markup.italic" = { fg = "fg0", modifiers = ["italic"] }
"markup.strikethrough" = { fg = "fg0", modifiers = ["crossed_out"] }
"markup.link.label" = "blue"
"markup.link.text" = "yellow"
"markup.link.url" = { fg = "blue", modifiers = ["underlined"] }
"markup.list" = "blue"
"markup.list.checked" = "green"
"markup.list.unchecked" = "grey1"
"markup.quote" = "grey1"
"markup.raw" = "green"
"diff.delta" = "blue"
"diff.minus" = "red"
"diff.plus" = "green"
"diagnostic.error" = { underline = { color = "red", style = "curl" } }
"diagnostic.hint" = { underline = { color = "green", style = "curl" } }
"diagnostic.info" = { underline = { color = "blue", style = "curl" } }
"diagnostic.unnecessary" = { modifiers = ["dim"] }
"diagnostic.warning" = { underline = { color = "yellow", style = "curl" } }
error = "red"
hint = "green"
info = "blue"
warning = "yellow"
"ui.background" = { fg = "fg0", bg = "bg0" }
"ui.bufferline" = { fg = "fg1", bg = "bg4" }
"ui.bufferline.active" = { fg = "bg0", bg = "grey2" }
"ui.bufferline.background" = { bg = "bg1" }
"ui.cursor" = { fg = "bg0", bg = "grey1" }
"ui.cursor.primary" = { fg = "bg0", bg = "fg0" }
"ui.cursor.match" = { bg = "bg2" }
"ui.cursorline.primary" = { bg = "bg1" }
"ui.help" = { fg = "grey1", bg = "bg0" }
"ui.highlight" = { bg = "bg2" }
"ui.linenr" = "bg3"
"ui.linenr.selected" = "grey1"
"ui.menu" = { fg = "fg1", bg = "bg2" }
"ui.menu.scroll" = { fg = "grey0", bg = "bg1" }
"ui.menu.selected" = { fg = "bg2", bg = "grey2" }
"ui.popup" = { fg = "fg1", bg = "bg2" }
"ui.popup.info" = { "fg" = "grey1", bg = "bg0" }
"ui.selection" = { bg = "bg2" }
"ui.statusline" = { fg = "fg1", bg = "bg1" }
"ui.statusline.inactive" = { fg = "grey1", bg = "bg1" }
"ui.statusline.insert" = { fg = "bg0", bg = "green" }
"ui.statusline.normal" = { fg = "bg0", bg = "grey2" }
"ui.statusline.select" = { fg = "bg0", bg = "red" }
"ui.text" = "fg0"
"ui.text.directory" = { fg = "blue" }
"ui.text.focus" = { bg = "bg2" }
"ui.text.inactive" = { fg = "grey1" }
"ui.text.info" = "grey1"
"ui.virtual" = "grey0"
"ui.virtual.indent-guide" = "bg3"
"ui.virtual.inlay-hint" = "grey0"
"ui.virtual.jump-label" = "grey2"
"ui.virtual.ruler" = { bg = "bg1" }
"ui.window" = { fg = "bg3" }
[palette]
fg0 = "#d4be98"
fg1 = "#ddc7a1"
bg0 = "#282828"
bg1 = "#32302f"
bg2 = "#45403d"
bg3 = "#5a524c"
bg4 = "#504945"
grey0 = "#7c6f64"
grey1 = "#928374"
grey2 = "#a89984"
aqua = "#89b482"
blue = "#7daea3"
green = "#a9b665"
orange = "#e78a4e"
purple = "#d3869b"
red = "#ea6962"
yellow = "#d8a657"

View File

@ -0,0 +1,14 @@
# Gruvbox Material Dark Soft for Helix
# Original Author: @sainnhe (https://github.com/sainnhe/gruvbox-material)
# Base theme ported by: @satoqz
# Palette ported by: @ivan-shymkiv
# License: MIT
inherits = "gruvbox_material_dark_medium"
[palette]
bg0 = "#32302f"
bg1 = "#3c3836"
bg2 = "#504945"
bg3 = "#665c54"
bg4 = "#5b534d"

View File

@ -0,0 +1,14 @@
# Gruvbox Material Light Hard for Helix
# Original Author: @sainnhe (https://github.com/sainnhe/gruvbox-material)
# Base theme ported by: @satoqz
# Palette ported by: @ivan-shymkiv
# License: MIT
inherits = "gruvbox_material_light_medium"
[palette]
bg0 = "#f9f5d7"
bg1 = "#f5edca"
bg2 = "#f2e5bc"
bg3 = "#ebdbb2"
bg4 = "#eee0b7"

View File

@ -0,0 +1,129 @@
# Gruvbox Material Light Medium for Helix
# Original Author: @sainnhe (https://github.com/sainnhe/gruvbox-material)
# Base theme ported by: @satoqz
# Palette ported by: @ivan-shymkiv
# License: MIT
"attribute" = "green"
"comment" = { fg = "grey1", modifiers = ["italic"] }
"constant" = "fg0"
"constant.builtin" = "purple"
"constant.character.escape" = "green"
"constant.numeric" = "purple"
"constructor" = "green"
"function" = "green"
"keyword" = "red"
"keyword.directive" = "purple"
"keyword.operator" = "orange"
"label" = "red"
"namespace" = "yellow"
"operator" = "orange"
"punctuation" = "grey1"
"punctuation.bracket" = "fg0"
"punctuation.delimiter" = "grey1"
"punctuation.special" = "blue"
"special" = "green"
"string" = "aqua"
"string.regexp" = "green"
"string.special.path" = "yellow"
"string.special.symbol" = "fg0"
"string.special.url" = { fg = "fg0", modifiers = ["underlined"] }
"tag" = "orange"
"type" = "yellow"
"type.enum.variant" = "purple"
"variable" = "fg0"
"variable.builtin" = "purple"
"variable.other.member" = "blue"
"variable.parameter" = "fg0"
"markup.heading.1" = "red"
"markup.heading.2" = "orange"
"markup.heading.3" = "yellow"
"markup.heading.4" = "green"
"markup.heading.5" = "blue"
"markup.heading.6" = "purple"
"markup.bold" = { fg = "fg0", modifiers = ["bold"] }
"markup.italic" = { fg = "fg0", modifiers = ["italic"] }
"markup.strikethrough" = { fg = "fg0", modifiers = ["crossed_out"] }
"markup.link.label" = "blue"
"markup.link.text" = "yellow"
"markup.link.url" = { fg = "blue", modifiers = ["underlined"] }
"markup.list" = "blue"
"markup.list.checked" = "green"
"markup.list.unchecked" = "grey1"
"markup.quote" = "grey1"
"markup.raw" = "green"
"diff.delta" = "blue"
"diff.minus" = "red"
"diff.plus" = "green"
"diagnostic.error" = { underline = { color = "red", style = "curl" } }
"diagnostic.hint" = { underline = { color = "green", style = "curl" } }
"diagnostic.info" = { underline = { color = "blue", style = "curl" } }
"diagnostic.unnecessary" = { modifiers = ["dim"] }
"diagnostic.warning" = { underline = { color = "yellow", style = "curl" } }
error = "red"
hint = "green"
info = "blue"
warning = "yellow"
"ui.background" = { fg = "fg0", bg = "bg0" }
"ui.bufferline" = { fg = "fg1", bg = "bg4" }
"ui.bufferline.active" = { fg = "bg0", bg = "grey2" }
"ui.bufferline.background" = { bg = "bg1" }
"ui.cursor" = { fg = "bg0", bg = "grey1" }
"ui.cursor.primary" = { fg = "bg0", bg = "fg0" }
"ui.cursor.match" = { bg = "bg2" }
"ui.cursorline.primary" = { bg = "bg1" }
"ui.help" = { fg = "grey1", bg = "bg0" }
"ui.highlight" = { bg = "bg2" }
"ui.linenr" = "bg3"
"ui.linenr.selected" = "grey1"
"ui.menu" = { fg = "fg1", bg = "bg2" }
"ui.menu.scroll" = { fg = "grey0", bg = "bg1" }
"ui.menu.selected" = { fg = "bg2", bg = "grey2" }
"ui.popup" = { fg = "fg1", bg = "bg2" }
"ui.popup.info" = { "fg" = "grey1", bg = "bg0" }
"ui.selection" = { bg = "bg2" }
"ui.statusline" = { fg = "fg1", bg = "bg1" }
"ui.statusline.inactive" = { fg = "grey1", bg = "bg1" }
"ui.statusline.insert" = { fg = "bg0", bg = "green" }
"ui.statusline.normal" = { fg = "bg0", bg = "grey2" }
"ui.statusline.select" = { fg = "bg0", bg = "red" }
"ui.text" = "fg0"
"ui.text.directory" = { fg = "blue" }
"ui.text.focus" = { bg = "bg2" }
"ui.text.inactive" = { fg = "grey1" }
"ui.text.info" = "grey1"
"ui.virtual" = "grey0"
"ui.virtual.indent-guide" = "bg3"
"ui.virtual.inlay-hint" = "grey0"
"ui.virtual.jump-label" = "grey2"
"ui.virtual.ruler" = { bg = "bg1" }
"ui.window" = { fg = "bg3" }
[palette]
fg0 = "#654735"
fg1 = "#4f3829"
bg0 = "#fbf1c7"
bg1 = "#f4e8be"
bg2 = "#eee0b7"
bg3 = "#ddccab"
bg4 = "#e5d5ad"
grey0 = "#a89984"
grey1 = "#928374"
grey2 = "#7c6f64"
aqua = "#4c7a5d"
blue = "#45707a"
green = "#6c782e"
orange = "#c35e0a"
purple = "#945e80"
red = "#c14a4a"
yellow = "#b47109"

View File

@ -0,0 +1,14 @@
# Gruvbox Material Light Soft for Helix
# Original Author: @sainnhe (https://github.com/sainnhe/gruvbox-material)
# Base theme ported by: @satoqz
# Palette ported by: @ivan-shymkiv
# License: MIT
inherits = "gruvbox_material_light_medium"
[palette]
bg0 = "#f2e5bc"
bg1 = "#eddeb5"
bg2 = "#e6d5ae"
bg3 = "#d5c4a1"
bg4 = "#dac9a5"

View File

@ -36,6 +36,7 @@ pub mod tasks {
let grammar = syntax_config.grammar;
LanguageData::compile_indent_query(grammar, config)?;
LanguageData::compile_textobject_query(grammar, config)?;
LanguageData::compile_tag_query(grammar, config)?;
}
println!("Query check succeeded");