Compare commits

...

10 Commits

Author SHA1 Message Date
SofusA aa4111016d
Merge 43cbbcd586 into fed3edcab7 2025-06-16 12:13:58 -06:00
Michael Davis fed3edcab7
queries: Fix highlighting of '#' in CSS color hex codes
This was previously highlighted as `punctuation` because the capture
applied to the whole `(color_value)` node rather than the `"#"` child
node specifically.
2025-06-16 13:43:41 -04:00
Michael Davis 4099465632
stdx: Add an iterator over grapheme indices in a rope slice 2025-06-16 13:12:54 -04:00
Michael Davis 9100bce9aa
stdx: Unify RopeSlice grapheme cluster iterators
This style for RopeGraphemes is identical to that of Ropey's Chars and
Bytes iterators. Being able to move the iterator types like cursors over
the bytes/chars/graphemes is useful in some cases; for example, see
`helix_core::movement::<Chars as CharHelpers>::range_to_target`.

This change also adds `RopeSliceExt::graphemes_at` for flexibility.
`graphemes` and `graphemes_rev` are now implemented in terms of
`graphemes_at` and `RopeGraphemes::reversed`.
2025-06-16 13:12:13 -04:00
Michael Davis f5dc8245ea
stdx: Add `RopeSliceExt::(nth_){next,prev}_grapheme_boundary`
These functions mirror those in `helix_core::graphemes` but operate
directly on byte indices rather than character indices. These are meant
to be used as we transition to Ropey v2 and always use byte indices.
2025-06-16 13:10:30 -04:00
Sofus Addington 43cbbcd586
Add separate event for pulling visible documents diagnostics 2025-06-11 09:06:24 +02:00
Sofus Addington ec10b7e741
Cancel ongoing pull diagnostic events 2025-06-11 09:06:23 +02:00
Sofus Addington 53738e5392
Remove related documents changes 2025-06-11 09:06:23 +02:00
Sofus Addington 86b9ef9367
Update document by path 2025-06-11 09:06:23 +02:00
Sofus Addington 00a06a4024
Pull diagnostics 2025-06-11 09:06:22 +02:00
11 changed files with 762 additions and 111 deletions

View File

@@ -265,6 +265,7 @@ pub enum LanguageServerFeature {
WorkspaceSymbols,
// Symbols, use bitflags, see above?
Diagnostics,
PullDiagnostics,
RenameSymbol,
InlayHints,
DocumentColors,
@@ -289,6 +290,7 @@ impl Display for LanguageServerFeature {
DocumentSymbols => "document-symbols",
WorkspaceSymbols => "workspace-symbols",
Diagnostics => "diagnostics",
PullDiagnostics => "pull-diagnostics",
RenameSymbol => "rename-symbol",
InlayHints => "inlay-hints",
DocumentColors => "document-colors",
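
A minimal usage sketch of the new feature flag, assuming a connected `client: &helix_lsp::Client`; `supports_feature` routes through the `diagnostic_provider` capability check added in client.rs below.

// Hedged sketch: gate pull-diagnostics requests on server support.
if client.supports_feature(LanguageServerFeature::PullDiagnostics) {
    // The server advertised a diagnosticProvider, so textDocument/diagnostic
    // requests are safe to send.
}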

View File

@@ -62,6 +62,7 @@ pub struct Client {
initialize_notify: Arc<Notify>,
/// workspace folders added while the server is still initializing
req_timeout: u64,
ongoing_work: Mutex<Vec<(lsp::TextDocumentIdentifier, lsp::ProgressToken)>>,
}
impl Client {
@@ -258,6 +259,7 @@ impl Client {
root_uri,
workspace_folders: Mutex::new(workspace_folders),
initialize_notify: initialize_notify.clone(),
ongoing_work: Mutex::new(Default::default()),
};
Ok((client, server_rx, initialize_notify))
@@ -372,6 +374,7 @@ impl Client {
Some(OneOf::Left(true) | OneOf::Right(_))
),
LanguageServerFeature::Diagnostics => true, // there's no extra server capability
LanguageServerFeature::PullDiagnostics => capabilities.diagnostic_provider.is_some(),
LanguageServerFeature::RenameSymbol => matches!(
capabilities.rename_provider,
Some(OneOf::Left(true)) | Some(OneOf::Right(_))
@@ -602,6 +605,9 @@ impl Client {
did_rename: Some(true),
..Default::default()
}),
diagnostic: Some(lsp::DiagnosticWorkspaceClientCapabilities {
refresh_support: Some(true),
}),
..Default::default()
}),
text_document: Some(lsp::TextDocumentClientCapabilities {
@@ -679,6 +685,10 @@ impl Client {
}),
..Default::default()
}),
diagnostic: Some(lsp::DiagnosticClientCapabilities {
dynamic_registration: Some(false),
related_document_support: Some(true),
}),
publish_diagnostics: Some(lsp::PublishDiagnosticsClientCapabilities {
version_support: Some(true),
tag_support: Some(lsp::TagSupport {
@@ -1229,6 +1239,75 @@ impl Client {
Some(self.call::<lsp::request::RangeFormatting>(params))
}
pub fn mark_work_as_done(&self, id: lsp::ProgressToken) {
self.ongoing_work.lock().retain(|x| x.1 != id);
}
fn cancel_ongoing_work(&self, id: lsp::ProgressToken) {
self.notify::<lsp::notification::Cancel>(lsp::CancelParams { id: id.clone() });
self.mark_work_as_done(id);
}
pub fn text_document_diagnostic(
&self,
text_document: lsp::TextDocumentIdentifier,
previous_result_id: Option<String>,
) -> Option<(
impl Future<Output = Result<lsp::DocumentDiagnosticReportResult>>,
lsp::ProgressToken,
)> {
let capabilities = self.capabilities();
let ongoing_work = {
let ongoing_work_lock = self.ongoing_work.lock();
ongoing_work_lock
.iter()
.filter(|x| x.0 == text_document)
.cloned()
.collect::<Vec<_>>()
};
if !ongoing_work.is_empty() {
for id in ongoing_work.into_iter().map(|x| x.1) {
self.cancel_ongoing_work(id.clone());
}
}
// Return early if the server does not support pull diagnostics.
let identifier = match capabilities.diagnostic_provider.as_ref()? {
lsp::DiagnosticServerCapabilities::Options(cap) => cap.identifier.clone(),
lsp::DiagnosticServerCapabilities::RegistrationOptions(cap) => {
cap.diagnostic_options.identifier.clone()
}
};
let request_id = match self.next_request_id() {
jsonrpc::Id::Null => lsp::ProgressToken::Number(1),
jsonrpc::Id::Num(num) => lsp::ProgressToken::Number(num as i32),
jsonrpc::Id::Str(str) => lsp::ProgressToken::String(str),
};
let params = lsp::DocumentDiagnosticParams {
text_document: text_document.clone(),
identifier,
previous_result_id,
work_done_progress_params: lsp::WorkDoneProgressParams {
work_done_token: Some(request_id.clone()),
},
partial_result_params: lsp::PartialResultParams::default(),
};
self.ongoing_work
.lock()
.push((text_document, request_id.clone()));
Some((
self.call::<lsp::request::DocumentDiagnosticRequest>(params),
request_id,
))
}
pub fn text_document_document_highlight(
&self,
text_document: lsp::TextDocumentIdentifier,
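
A hedged sketch of how a caller might drive this request end to end; `client` and `doc` are placeholder bindings and error handling is elided.

// Sketch only: issue one pull, then release the tracked progress token.
// `text_document_diagnostic` returns `None` when the server has no diagnosticProvider.
async fn pull_once(client: &Client, doc: lsp::TextDocumentIdentifier) {
    let Some((request, token)) = client.text_document_diagnostic(doc, None) else {
        return;
    };
    match request.await {
        Ok(_report) => { /* hand the DocumentDiagnosticReportResult to the editor */ }
        Err(err) => log::error!("pull diagnostics failed: {err}"),
    }
    client.mark_work_as_done(token);
}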

View File

@@ -463,6 +463,7 @@ pub enum MethodCall {
RegisterCapability(lsp::RegistrationParams),
UnregisterCapability(lsp::UnregistrationParams),
ShowDocument(lsp::ShowDocumentParams),
WorkspaceDiagnosticRefresh,
}
impl MethodCall {
@@ -494,6 +495,7 @@ impl MethodCall {
let params: lsp::ShowDocumentParams = params.parse()?;
Self::ShowDocument(params)
}
lsp::request::WorkspaceDiagnosticRefresh::METHOD => Self::WorkspaceDiagnosticRefresh,
_ => {
return Err(Error::Unhandled);
}

View File

@@ -135,7 +135,9 @@ pub trait RopeSliceExt<'a>: Sized {
/// let graphemes: Vec<_> = text.graphemes().collect();
/// assert_eq!(graphemes.as_slice(), &["😶‍🌫️", "🏴‍☠️", "🖼️"]);
/// ```
fn graphemes(self) -> RopeGraphemes<'a>;
fn graphemes(self) -> RopeGraphemes<'a> {
self.graphemes_at(0)
}
/// Returns an iterator over the grapheme clusters in the slice, reversed.
///
/// The returned iterator starts at the end of the slice and ends at the beginning of the
@@ -150,7 +152,127 @@ pub trait RopeSliceExt<'a>: Sized {
/// let graphemes: Vec<_> = text.graphemes_rev().collect();
/// assert_eq!(graphemes.as_slice(), &["🖼️", "🏴‍☠️", "😶‍🌫️"]);
/// ```
fn graphemes_rev(self) -> RevRopeGraphemes<'a>;
fn graphemes_rev(self) -> RopeGraphemes<'a>;
/// Returns an iterator over the grapheme clusters in the slice, starting at the given byte index.
///
/// # Example
///
/// ```
/// # use ropey::Rope;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = Rope::from_str("😶‍🌫️🏴‍☠️🖼️");
/// // 14 is the byte index of the pirate flag's starting cluster boundary.
/// let graphemes: Vec<_> = text.slice(..).graphemes_at(14).collect();
/// assert_eq!(graphemes.as_slice(), &["🏴‍☠️", "🖼️"]);
/// // 27 is the byte index of the pirate flag's ending cluster boundary.
/// let graphemes: Vec<_> = text.slice(..).graphemes_at(27).reversed().collect();
/// assert_eq!(graphemes.as_slice(), &["🏴‍☠️", "😶‍🌫️"]);
/// ```
fn graphemes_at(self, byte_idx: usize) -> RopeGraphemes<'a>;
/// Returns an iterator over the grapheme clusters in a rope and the byte index where each
/// grapheme cluster starts.
///
/// # Example
///
/// ```
/// # use ropey::Rope;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = Rope::from_str("😶‍🌫️🏴‍☠️🖼️");
/// let slice = text.slice(..);
/// let graphemes: Vec<_> = slice.grapheme_indices_at(0).collect();
/// assert_eq!(
/// graphemes.as_slice(),
/// &[(0, "😶‍🌫️".into()), (14, "🏴‍☠️".into()), (27, "🖼️".into())]
/// );
/// let graphemes: Vec<_> = slice.grapheme_indices_at(slice.len_bytes()).reversed().collect();
/// assert_eq!(
/// graphemes.as_slice(),
/// &[(27, "🖼️".into()), (14, "🏴‍☠️".into()), (0, "😶‍🌫️".into())]
/// );
/// ```
fn grapheme_indices_at(self, byte_idx: usize) -> RopeGraphemeIndices<'a>;
/// Finds the byte index of the next grapheme boundary after `byte_idx`.
///
/// If the byte index lies on the last grapheme cluster in the slice then this function
/// returns `RopeSlice::len_bytes`.
///
/// # Example
///
/// ```
/// # use ropey::Rope;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = Rope::from_str("😶‍🌫️🏴‍☠️🖼️");
/// let slice = text.slice(..);
/// let mut byte_idx = 0;
/// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("😶‍🌫️".into()));
/// byte_idx = slice.next_grapheme_boundary(byte_idx);
/// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("🏴‍☠️".into()));
///
/// // If `byte_idx` does not lie on a character or grapheme boundary then this function is
/// // functionally the same as `ceil_grapheme_boundary`.
/// assert_eq!(slice.next_grapheme_boundary(byte_idx - 1), byte_idx);
/// assert_eq!(slice.next_grapheme_boundary(byte_idx - 2), byte_idx);
/// assert_eq!(slice.next_grapheme_boundary(byte_idx + 1), slice.next_grapheme_boundary(byte_idx));
/// assert_eq!(slice.next_grapheme_boundary(byte_idx + 2), slice.next_grapheme_boundary(byte_idx));
///
/// byte_idx = slice.next_grapheme_boundary(byte_idx);
/// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("🖼️".into()));
/// byte_idx = slice.next_grapheme_boundary(byte_idx);
/// assert_eq!(slice.graphemes_at(byte_idx).next(), None);
/// assert_eq!(byte_idx, slice.len_bytes());
/// ```
fn next_grapheme_boundary(self, byte_idx: usize) -> usize {
self.nth_next_grapheme_boundary(byte_idx, 1)
}
/// Finds the byte index of the `n`th grapheme cluster after the given `byte_idx`.
///
/// If there are fewer than `n` grapheme clusters after `byte_idx` in the rope then this
/// function returns `RopeSlice::len_bytes`.
///
/// This is functionally equivalent to calling `next_grapheme_boundary` `n` times but is more
/// efficient.
fn nth_next_grapheme_boundary(self, byte_idx: usize, n: usize) -> usize;
/// Finds the byte index of the previous grapheme boundary before `byte_idx`.
///
/// If the byte index lies on the first grapheme cluster in the slice then this function
/// returns zero.
///
/// # Example
///
/// ```
/// # use ropey::Rope;
/// # use helix_stdx::rope::RopeSliceExt;
/// let text = Rope::from_str("😶‍🌫️🏴‍☠️🖼️");
/// let slice = text.slice(..);
/// let mut byte_idx = text.len_bytes();
/// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("🖼️".into()));
/// byte_idx = slice.prev_grapheme_boundary(byte_idx);
/// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("🏴‍☠️".into()));
///
/// // If `byte_idx` does not lie on a character or grapheme boundary then this function is
/// // functionally the same as `floor_grapheme_boundary`.
/// assert_eq!(slice.prev_grapheme_boundary(byte_idx + 1), byte_idx);
/// assert_eq!(slice.prev_grapheme_boundary(byte_idx + 2), byte_idx);
/// assert_eq!(slice.prev_grapheme_boundary(byte_idx - 1), slice.prev_grapheme_boundary(byte_idx));
/// assert_eq!(slice.prev_grapheme_boundary(byte_idx - 2), slice.prev_grapheme_boundary(byte_idx));
///
/// byte_idx = slice.prev_grapheme_boundary(byte_idx);
/// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("😶‍🌫️".into()));
/// byte_idx = slice.prev_grapheme_boundary(byte_idx);
/// assert_eq!(slice.graphemes_at(byte_idx).prev(), None);
/// assert_eq!(byte_idx, 0);
/// ```
fn prev_grapheme_boundary(self, byte_idx: usize) -> usize {
self.nth_prev_grapheme_boundary(byte_idx, 1)
}
/// Finds the byte index of the `n`th grapheme cluster before the given `byte_idx`.
///
/// If there are fewer than `n` grapheme clusters before `byte_idx` in the rope then this
/// function returns zero.
///
/// This is functionally equivalent to calling `prev_grapheme_boundary` `n` times but is more
/// efficient.
fn nth_prev_grapheme_boundary(self, byte_idx: usize, n: usize) -> usize;
}
impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
@@ -335,31 +457,111 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
}
}
fn graphemes(self) -> RopeGraphemes<'a> {
let mut chunks = self.chunks();
let first_chunk = chunks.next().unwrap_or("");
fn graphemes_rev(self) -> RopeGraphemes<'a> {
self.graphemes_at(self.len_bytes()).reversed()
}
fn graphemes_at(self, byte_idx: usize) -> RopeGraphemes<'a> {
// Bounds check
assert!(byte_idx <= self.len_bytes());
let (mut chunks, chunk_byte_idx, _, _) = self.chunks_at_byte(byte_idx);
let current_chunk = chunks.next().unwrap_or("");
RopeGraphemes {
text: self,
chunks,
cur_chunk: first_chunk,
cur_chunk_start: 0,
cursor: GraphemeCursor::new(0, self.len_bytes(), true),
current_chunk,
chunk_byte_idx,
cursor: GraphemeCursor::new(byte_idx, self.len_bytes(), true),
is_reversed: false,
}
}
fn graphemes_rev(self) -> RevRopeGraphemes<'a> {
let (mut chunks, mut cur_chunk_start, _, _) = self.chunks_at_byte(self.len_bytes());
chunks.reverse();
let first_chunk = chunks.next().unwrap_or("");
cur_chunk_start -= first_chunk.len();
RevRopeGraphemes {
text: self,
chunks,
cur_chunk: first_chunk,
cur_chunk_start,
cursor: GraphemeCursor::new(self.len_bytes(), self.len_bytes(), true),
fn grapheme_indices_at(self, byte_idx: usize) -> RopeGraphemeIndices<'a> {
// Bounds check
assert!(byte_idx <= self.len_bytes());
RopeGraphemeIndices {
front_offset: byte_idx,
iter: self.graphemes_at(byte_idx),
is_reversed: false,
}
}
fn nth_next_grapheme_boundary(self, mut byte_idx: usize, n: usize) -> usize {
// Bounds check
assert!(byte_idx <= self.len_bytes());
byte_idx = self.floor_char_boundary(byte_idx);
// Get the chunk with our byte index in it.
let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);
// Set up the grapheme cursor.
let mut gc = GraphemeCursor::new(byte_idx, self.len_bytes(), true);
// Find the nth next grapheme cluster boundary.
for _ in 0..n {
loop {
match gc.next_boundary(chunk, chunk_byte_idx) {
Ok(None) => return self.len_bytes(),
Ok(Some(boundary)) => {
byte_idx = boundary;
break;
}
Err(GraphemeIncomplete::NextChunk) => {
chunk_byte_idx += chunk.len();
let (a, _, _, _) = self.chunk_at_byte(chunk_byte_idx);
chunk = a;
}
Err(GraphemeIncomplete::PreContext(n)) => {
let ctx_chunk = self.chunk_at_byte(n - 1).0;
gc.provide_context(ctx_chunk, n - ctx_chunk.len());
}
_ => unreachable!(),
}
}
}
byte_idx
}
fn nth_prev_grapheme_boundary(self, mut byte_idx: usize, n: usize) -> usize {
// Bounds check
assert!(byte_idx <= self.len_bytes());
byte_idx = self.ceil_char_boundary(byte_idx);
// Get the chunk with our byte index in it.
let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);
// Set up the grapheme cursor.
let mut gc = GraphemeCursor::new(byte_idx, self.len_bytes(), true);
for _ in 0..n {
loop {
match gc.prev_boundary(chunk, chunk_byte_idx) {
Ok(None) => return 0,
Ok(Some(boundary)) => {
byte_idx = boundary;
break;
}
Err(GraphemeIncomplete::PrevChunk) => {
let (a, b, _, _) = self.chunk_at_byte(chunk_byte_idx - 1);
chunk = a;
chunk_byte_idx = b;
}
Err(GraphemeIncomplete::PreContext(n)) => {
let ctx_chunk = self.chunk_at_byte(n - 1).0;
gc.provide_context(ctx_chunk, n - ctx_chunk.len());
}
_ => unreachable!(),
}
}
}
byte_idx
}
}
// copied from std
@@ -370,13 +572,19 @@ const fn is_utf8_char_boundary(b: u8) -> bool {
}
/// An iterator over the graphemes of a `RopeSlice`.
///
/// This iterator is cursor-like: rather than implementing DoubleEndedIterator, it can be reversed
/// like a cursor. This style matches the `Bytes` and `Chars` iterator types in Ropey and is more
/// natural and useful for wrapping `GraphemeCursor`.
#[derive(Clone)]
pub struct RopeGraphemes<'a> {
text: RopeSlice<'a>,
chunks: Chunks<'a>,
cur_chunk: &'a str,
cur_chunk_start: usize,
current_chunk: &'a str,
/// Byte index of the start of the current chunk.
chunk_byte_idx: usize,
cursor: GraphemeCursor,
is_reversed: bool,
}
impl fmt::Debug for RopeGraphemes<'_> {
@@ -384,112 +592,178 @@ impl fmt::Debug for RopeGraphemes<'_> {
f.debug_struct("RopeGraphemes")
.field("text", &self.text)
.field("chunks", &self.chunks)
.field("cur_chunk", &self.cur_chunk)
.field("cur_chunk_start", &self.cur_chunk_start)
.field("current_chunk", &self.current_chunk)
.field("chunk_byte_idx", &self.chunk_byte_idx)
// .field("cursor", &self.cursor)
.field("is_reversed", &self.is_reversed)
.finish()
}
}
impl<'a> RopeGraphemes<'a> {
#[allow(clippy::should_implement_trait)]
pub fn next(&mut self) -> Option<RopeSlice<'a>> {
if self.is_reversed {
self.prev_impl()
} else {
self.next_impl()
}
}
pub fn prev(&mut self) -> Option<RopeSlice<'a>> {
if self.is_reversed {
self.next_impl()
} else {
self.prev_impl()
}
}
pub fn reverse(&mut self) {
self.is_reversed = !self.is_reversed;
}
#[must_use]
pub fn reversed(mut self) -> Self {
self.reverse();
self
}
fn next_impl(&mut self) -> Option<RopeSlice<'a>> {
let a = self.cursor.cur_cursor();
let b;
loop {
match self
.cursor
.next_boundary(self.current_chunk, self.chunk_byte_idx)
{
Ok(None) => return None,
Ok(Some(boundary)) => {
b = boundary;
break;
}
Err(GraphemeIncomplete::NextChunk) => {
self.chunk_byte_idx += self.current_chunk.len();
self.current_chunk = self.chunks.next().unwrap_or("");
}
Err(GraphemeIncomplete::PreContext(idx)) => {
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
self.cursor.provide_context(chunk, byte_idx);
}
_ => unreachable!(),
}
}
if a < self.chunk_byte_idx {
Some(self.text.byte_slice(a..b))
} else {
let a2 = a - self.chunk_byte_idx;
let b2 = b - self.chunk_byte_idx;
Some((&self.current_chunk[a2..b2]).into())
}
}
fn prev_impl(&mut self) -> Option<RopeSlice<'a>> {
let a = self.cursor.cur_cursor();
let b;
loop {
match self
.cursor
.prev_boundary(self.current_chunk, self.chunk_byte_idx)
{
Ok(None) => return None,
Ok(Some(boundary)) => {
b = boundary;
break;
}
Err(GraphemeIncomplete::PrevChunk) => {
self.current_chunk = self.chunks.prev().unwrap_or("");
self.chunk_byte_idx -= self.current_chunk.len();
}
Err(GraphemeIncomplete::PreContext(idx)) => {
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
self.cursor.provide_context(chunk, byte_idx);
}
_ => unreachable!(),
}
}
if a >= self.chunk_byte_idx + self.current_chunk.len() {
Some(self.text.byte_slice(b..a))
} else {
let a2 = a - self.chunk_byte_idx;
let b2 = b - self.chunk_byte_idx;
Some((&self.current_chunk[b2..a2]).into())
}
}
}
impl<'a> Iterator for RopeGraphemes<'a> {
type Item = RopeSlice<'a>;
fn next(&mut self) -> Option<Self::Item> {
let a = self.cursor.cur_cursor();
let b;
loop {
match self
.cursor
.next_boundary(self.cur_chunk, self.cur_chunk_start)
{
Ok(None) => {
return None;
}
Ok(Some(n)) => {
b = n;
break;
}
Err(GraphemeIncomplete::NextChunk) => {
self.cur_chunk_start += self.cur_chunk.len();
self.cur_chunk = self.chunks.next().unwrap_or("");
}
Err(GraphemeIncomplete::PreContext(idx)) => {
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
self.cursor.provide_context(chunk, byte_idx);
}
_ => unreachable!(),
}
}
RopeGraphemes::next(self)
}
}
if a < self.cur_chunk_start {
Some(self.text.byte_slice(a..b))
/// An iterator over the grapheme clusters in a rope and the byte indices where each grapheme
/// cluster starts.
///
/// This iterator wraps `RopeGraphemes` and is also cursor-like. Use `reverse` or `reversed` to
/// toggle the direction of the iterator. See [RopeGraphemes].
#[derive(Debug, Clone)]
pub struct RopeGraphemeIndices<'a> {
front_offset: usize,
iter: RopeGraphemes<'a>,
is_reversed: bool,
}
impl<'a> RopeGraphemeIndices<'a> {
#[allow(clippy::should_implement_trait)]
pub fn next(&mut self) -> Option<(usize, RopeSlice<'a>)> {
if self.is_reversed {
self.prev_impl()
} else {
let a2 = a - self.cur_chunk_start;
let b2 = b - self.cur_chunk_start;
Some((&self.cur_chunk[a2..b2]).into())
self.next_impl()
}
}
}
/// An iterator over the graphemes of a `RopeSlice` in reverse.
#[derive(Clone)]
pub struct RevRopeGraphemes<'a> {
text: RopeSlice<'a>,
chunks: Chunks<'a>,
cur_chunk: &'a str,
cur_chunk_start: usize,
cursor: GraphemeCursor,
}
pub fn prev(&mut self) -> Option<(usize, RopeSlice<'a>)> {
if self.is_reversed {
self.next_impl()
} else {
self.prev_impl()
}
}
impl fmt::Debug for RevRopeGraphemes<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("RevRopeGraphemes")
.field("text", &self.text)
.field("chunks", &self.chunks)
.field("cur_chunk", &self.cur_chunk)
.field("cur_chunk_start", &self.cur_chunk_start)
// .field("cursor", &self.cursor)
.finish()
pub fn reverse(&mut self) {
self.is_reversed = !self.is_reversed;
}
#[must_use]
pub fn reversed(mut self) -> Self {
self.reverse();
self
}
fn next_impl(&mut self) -> Option<(usize, RopeSlice<'a>)> {
let slice = self.iter.next()?;
let idx = self.front_offset;
self.front_offset += slice.len_bytes();
Some((idx, slice))
}
fn prev_impl(&mut self) -> Option<(usize, RopeSlice<'a>)> {
let slice = self.iter.prev()?;
self.front_offset -= slice.len_bytes();
Some((self.front_offset, slice))
}
}
impl<'a> Iterator for RevRopeGraphemes<'a> {
type Item = RopeSlice<'a>;
impl<'a> Iterator for RopeGraphemeIndices<'a> {
type Item = (usize, RopeSlice<'a>);
fn next(&mut self) -> Option<Self::Item> {
let a = self.cursor.cur_cursor();
let b;
loop {
match self
.cursor
.prev_boundary(self.cur_chunk, self.cur_chunk_start)
{
Ok(None) => {
return None;
}
Ok(Some(n)) => {
b = n;
break;
}
Err(GraphemeIncomplete::PrevChunk) => {
self.cur_chunk = self.chunks.next().unwrap_or("");
self.cur_chunk_start -= self.cur_chunk.len();
}
Err(GraphemeIncomplete::PreContext(idx)) => {
let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
self.cursor.provide_context(chunk, byte_idx);
}
_ => unreachable!(),
}
}
if a >= self.cur_chunk_start + self.cur_chunk.len() {
Some(self.text.byte_slice(b..a))
} else {
let a2 = a - self.cur_chunk_start;
let b2 = b - self.cur_chunk_start;
Some((&self.cur_chunk[b2..a2]).into())
}
RopeGraphemeIndices::next(self)
}
}
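
A small hedged sketch tying the new API together, in the style of the doctests above: the `nth_*` helpers agree with repeated single steps, and the grapheme iterator is a cursor that can change direction mid-iteration. The example string is assumed.

use helix_stdx::rope::RopeSliceExt;
use ropey::Rope;

let text = Rope::from_str("a😶‍🌫️b");
let slice = text.slice(..);

// nth_next_grapheme_boundary(idx, n) matches applying next_grapheme_boundary n times.
let mut stepped = 0;
for _ in 0..2 {
    stepped = slice.next_grapheme_boundary(stepped);
}
assert_eq!(slice.nth_next_grapheme_boundary(0, 2), stepped);

// The iterator is a cursor: walk forward, then turn around in place.
let mut graphemes = slice.graphemes_at(0);
assert_eq!(graphemes.next(), Some("a".into()));
assert_eq!(graphemes.next(), Some("😶‍🌫️".into()));
graphemes.reverse();
assert_eq!(graphemes.next(), Some("😶‍🌫️".into()));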

View File

@@ -1009,6 +1009,21 @@ impl Application {
let result = self.handle_show_document(params, offset_encoding);
Ok(json!(result))
}
Ok(MethodCall::WorkspaceDiagnosticRefresh) => {
for document in self.editor.documents() {
let language_server = language_server!();
if language_server.supports_feature(
syntax::config::LanguageServerFeature::PullDiagnostics,
) && document.supports_language_server(language_server.id())
{
handlers::diagnostics::pull_diagnostics_for_document(
document,
language_server,
);
}
}
Ok(serde_json::Value::Null)
}
};
let language_server = language_server!();

View File

@@ -1,11 +1,13 @@
use std::sync::Arc;
use arc_swap::ArcSwap;
use diagnostics::PullAllDocumentsDiagnosticHandler;
use helix_event::AsyncHook;
use crate::config::Config;
use crate::events;
use crate::handlers::auto_save::AutoSaveHandler;
use crate::handlers::diagnostics::PullDiagnosticsHandler;
use crate::handlers::signature_help::SignatureHelpHandler;
pub use helix_view::handlers::Handlers;
@@ -14,7 +16,7 @@ use self::document_colors::DocumentColorsHandler;
mod auto_save;
pub mod completion;
mod diagnostics;
pub mod diagnostics;
mod document_colors;
mod signature_help;
mod snippet;
@@ -26,12 +28,16 @@ pub fn setup(config: Arc<ArcSwap<Config>>) -> Handlers {
let signature_hints = SignatureHelpHandler::new().spawn();
let auto_save = AutoSaveHandler::new().spawn();
let document_colors = DocumentColorsHandler::default().spawn();
let pull_diagnostics = PullDiagnosticsHandler::new().spawn();
let pull_all_documents_diagnostics = PullAllDocumentsDiagnosticHandler::new().spawn();
let handlers = Handlers {
completions: helix_view::handlers::completion::CompletionHandler::new(event_tx),
signature_hints,
auto_save,
document_colors,
pull_diagnostics,
pull_all_documents_diagnostics,
};
helix_view::handlers::register_hooks(&handlers);

View File

@@ -1,12 +1,25 @@
use std::collections::HashSet;
use std::time::Duration;
use tokio::time::Instant;
use helix_core::diagnostic::DiagnosticProvider;
use helix_core::syntax::config::LanguageServerFeature;
use helix_core::Uri;
use helix_event::{register_hook, send_blocking};
use helix_lsp::lsp;
use helix_view::document::Mode;
use helix_view::events::DiagnosticsDidChange;
use helix_view::events::{
DiagnosticsDidChange, DocumentDidChange, DocumentDidOpen, LanguageServerInitialized,
};
use helix_view::handlers::diagnostics::DiagnosticEvent;
use helix_view::handlers::lsp::{PullAllDocumentsDiagnosticsEvent, PullDiagnosticsEvent};
use helix_view::handlers::Handlers;
use helix_view::{DocumentId, Editor};
use crate::events::OnModeSwitch;
use crate::job;
pub(super) fn register_hooks(_handlers: &Handlers) {
pub(super) fn register_hooks(handlers: &Handlers) {
register_hook!(move |event: &mut DiagnosticsDidChange<'_>| {
if event.editor.mode != Mode::Insert {
for (view, _) in event.editor.tree.views_mut() {
@@ -21,4 +34,249 @@ pub(super) fn register_hooks(_handlers: &Handlers) {
}
Ok(())
});
let tx = handlers.pull_diagnostics.clone();
let tx_all_documents = handlers.pull_all_documents_diagnostics.clone();
register_hook!(move |event: &mut DocumentDidChange<'_>| {
if event
.doc
.has_language_server_with_feature(LanguageServerFeature::PullDiagnostics)
{
let document_id = event.doc.id();
send_blocking(&tx, PullDiagnosticsEvent { document_id });
send_blocking(&tx_all_documents, PullAllDocumentsDiagnosticsEvent {});
}
Ok(())
});
register_hook!(move |event: &mut DocumentDidOpen<'_>| {
let doc = doc!(event.editor, &event.doc);
for language_server in
doc.language_servers_with_feature(LanguageServerFeature::PullDiagnostics)
{
pull_diagnostics_for_document(doc, language_server);
}
Ok(())
});
register_hook!(move |event: &mut LanguageServerInitialized<'_>| {
let language_server = event.editor.language_server_by_id(event.server_id).unwrap();
if language_server.supports_feature(LanguageServerFeature::PullDiagnostics) {
for doc in event
.editor
.documents()
.filter(|doc| doc.supports_language_server(event.server_id))
{
pull_diagnostics_for_document(doc, language_server);
}
}
Ok(())
});
}
#[derive(Debug)]
pub(super) struct PullDiagnosticsHandler {
document_ids: HashSet<DocumentId>,
}
impl PullDiagnosticsHandler {
pub fn new() -> Self {
PullDiagnosticsHandler {
document_ids: Default::default(),
}
}
}
impl helix_event::AsyncHook for PullDiagnosticsHandler {
type Event = PullDiagnosticsEvent;
fn handle_event(
&mut self,
event: Self::Event,
_timeout: Option<tokio::time::Instant>,
) -> Option<tokio::time::Instant> {
self.document_ids.insert(event.document_id);
Some(Instant::now() + Duration::from_millis(125))
}
fn finish_debounce(&mut self) {
let document_ids = self.document_ids.clone();
job::dispatch_blocking(move |editor, _| {
for document_id in document_ids {
let document = editor.document(document_id);
let Some(document) = document else {
return;
};
let language_servers = document
.language_servers_with_feature(LanguageServerFeature::PullDiagnostics)
.filter(|ls| ls.is_initialized());
for language_server in language_servers {
pull_diagnostics_for_document(document, language_server);
}
}
})
}
}
#[derive(Debug)]
pub(super) struct PullAllDocumentsDiagnosticHandler {}
impl PullAllDocumentsDiagnosticHandler {
pub fn new() -> Self {
PullAllDocumentsDiagnosticHandler {}
}
}
impl helix_event::AsyncHook for PullAllDocumentsDiagnosticHandler {
type Event = PullAllDocumentsDiagnosticsEvent;
fn handle_event(
&mut self,
_event: Self::Event,
_timeout: Option<tokio::time::Instant>,
) -> Option<tokio::time::Instant> {
Some(Instant::now() + Duration::from_millis(500))
}
fn finish_debounce(&mut self) {
job::dispatch_blocking(move |editor, _| {
for document in editor.documents.values() {
let language_servers = document
.language_servers_with_feature(LanguageServerFeature::PullDiagnostics)
.filter(|ls| ls.is_initialized())
.filter(|ls| {
ls.capabilities().diagnostic_provider.as_ref().is_some_and(
|diagnostic_provider| match diagnostic_provider {
lsp::DiagnosticServerCapabilities::Options(options) => {
options.inter_file_dependencies
}
lsp::DiagnosticServerCapabilities::RegistrationOptions(options) => {
options.diagnostic_options.inter_file_dependencies
}
},
)
});
for language_server in language_servers {
pull_diagnostics_for_document(document, language_server);
}
}
})
}
}
pub fn pull_diagnostics_for_document(
doc: &helix_view::Document,
language_server: &helix_lsp::Client,
) {
let Some(future) = language_server
.text_document_diagnostic(doc.identifier(), doc.previous_diagnostic_id.clone())
else {
return;
};
let Some(uri) = doc.uri() else {
return;
};
let identifier = language_server
.capabilities()
.diagnostic_provider
.as_ref()
.and_then(|diagnostic_provider| match diagnostic_provider {
lsp::DiagnosticServerCapabilities::Options(options) => options.identifier.clone(),
lsp::DiagnosticServerCapabilities::RegistrationOptions(options) => {
options.diagnostic_options.identifier.clone()
}
});
let language_server_id = language_server.id();
let provider = DiagnosticProvider::Lsp {
server_id: language_server_id,
identifier,
};
let document_id = doc.id();
tokio::spawn(async move {
match future.0.await {
Ok(result) => {
job::dispatch(move |editor, _| {
if let Some(language_server) = editor.language_server_by_id(language_server_id)
{
language_server.mark_work_as_done(future.1);
};
handle_pull_diagnostics_response(editor, result, provider, uri, document_id)
})
.await
}
Err(err) => {
let parsed_cancellation_data = if let helix_lsp::Error::Rpc(error) = err {
error.data.and_then(|data| {
serde_json::from_value::<lsp::DiagnosticServerCancellationData>(data).ok()
})
} else {
log::error!("Pull diagnostic request failed: {err}");
return;
};
if let Some(parsed_cancellation_data) = parsed_cancellation_data {
if parsed_cancellation_data.retrigger_request {
tokio::time::sleep(Duration::from_millis(500)).await;
job::dispatch(move |editor, _| {
if let (Some(doc), Some(language_server)) = (
editor.document(document_id),
editor.language_server_by_id(language_server_id),
) {
language_server.mark_work_as_done(future.1);
if doc.supports_language_server(language_server_id) {
pull_diagnostics_for_document(doc, language_server);
}
}
})
.await;
}
}
}
}
});
}
fn handle_pull_diagnostics_response(
editor: &mut Editor,
result: lsp::DocumentDiagnosticReportResult,
provider: DiagnosticProvider,
uri: Uri,
document_id: DocumentId,
) {
match result {
lsp::DocumentDiagnosticReportResult::Report(report) => {
let result_id = match report {
lsp::DocumentDiagnosticReport::Full(report) => {
editor.handle_lsp_diagnostics(
&provider,
uri,
None,
report.full_document_diagnostic_report.items,
);
report.full_document_diagnostic_report.result_id
}
lsp::DocumentDiagnosticReport::Unchanged(report) => {
Some(report.unchanged_document_diagnostic_report.result_id)
}
};
if let Some(doc) = editor.document_mut(document_id) {
doc.previous_diagnostic_id = result_id;
};
}
lsp::DocumentDiagnosticReportResult::Partial(_) => {}
};
}
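
A hedged sketch of reusing the helper above outside of the registered hooks, e.g. from a hypothetical on-demand command; the `current_ref!` macro and editor access are assumptions for illustration, not part of this change.

// Sketch only: pull diagnostics for the focused document on demand.
fn pull_for_current_document(editor: &helix_view::Editor) {
    let (_view, doc) = current_ref!(editor); // assumed helix_view macro
    for language_server in
        doc.language_servers_with_feature(LanguageServerFeature::PullDiagnostics)
    {
        pull_diagnostics_for_document(doc, language_server);
    }
}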

View File

@@ -204,6 +204,8 @@ pub struct Document {
pub readonly: bool,
pub previous_diagnostic_id: Option<String>,
/// Annotations for LSP document color swatches
pub color_swatches: Option<DocumentColorSwatches>,
// NOTE: ideally this would live on the handler for color swatches. This is blocked on a
@@ -728,6 +730,7 @@ impl Document {
color_swatches: None,
color_swatch_controller: TaskController::new(),
syn_loader,
previous_diagnostic_id: None,
}
}
@@ -2277,6 +2280,10 @@ impl Document {
pub fn reset_all_inlay_hints(&mut self) {
self.inlay_hints = Default::default();
}
pub fn has_language_server_with_feature(&self, feature: LanguageServerFeature) -> bool {
self.language_servers_with_feature(feature).next().is_some()
}
}
#[derive(Debug, Default)]

View File

@@ -22,6 +22,8 @@ pub struct Handlers {
pub signature_hints: Sender<lsp::SignatureHelpEvent>,
pub auto_save: Sender<AutoSaveEvent>,
pub document_colors: Sender<lsp::DocumentColorsEvent>,
pub pull_diagnostics: Sender<lsp::PullDiagnosticsEvent>,
pub pull_all_documents_diagnostics: Sender<lsp::PullAllDocumentsDiagnosticsEvent>,
}
impl Handlers {

View File

@@ -30,6 +30,12 @@ pub enum SignatureHelpEvent {
RequestComplete { open: bool },
}
pub struct PullDiagnosticsEvent {
pub document_id: DocumentId,
}
pub struct PullAllDocumentsDiagnosticsEvent {}
#[derive(Debug)]
pub struct ApplyEditError {
pub kind: ApplyEditErrorKind,

View File

@@ -65,7 +65,7 @@
] @punctuation
(string_value) @string
((color_value) "#") @string.special
(color_value "#" @string.special)
(color_value) @string.special
(integer_value) @constant.numeric.integer