From c72755437a65a79589e32948c57a87cba39533a7 Mon Sep 17 00:00:00 2001 From: Nikita Revenco <154856872+NikitaRevenco@users.noreply.github.com> Date: Mon, 3 Feb 2025 21:05:36 +0000 Subject: [PATCH] feat: continue comment uses injected comment tokens --- helix-core/src/comment.rs | 83 +++++++++++++++++++++++--------------- helix-term/src/commands.rs | 42 ++++++++++--------- 2 files changed, 72 insertions(+), 53 deletions(-) diff --git a/helix-core/src/comment.rs b/helix-core/src/comment.rs index 3e809bc53..549110a32 100644 --- a/helix-core/src/comment.rs +++ b/helix-core/src/comment.rs @@ -11,19 +11,56 @@ use std::borrow::Cow; pub const DEFAULT_COMMENT_TOKEN: &str = "#"; /// Returns the longest matching comment token of the given line (if it exists). -pub fn get_comment_token<'a, S: AsRef<str>>( +pub fn get_comment_token( + syntax: Option<&Syntax>, text: RopeSlice, - tokens: &'a [S], + doc_default_tokens: Option<&Vec<String>>, line_num: usize, -) -> Option<&'a str> { +) -> Option<String> { let line = text.line(line_num); let start = line.first_non_whitespace_char()?; + let start_char = text.line_to_char(line_num) + start; - tokens - .iter() - .map(AsRef::as_ref) - .filter(|token| line.slice(start..).starts_with(token)) - .max_by_key(|token| token.len()) + let injected_tokens = get_injected_tokens(syntax, start_char, start_char) + // we don't care about block comment tokens + .0 + .and_then(|tokens| { + tokens + .into_iter() + .filter(|token| line.slice(start..).starts_with(token)) + .max_by_key(|token| token.len()) + }); + + injected_tokens.or( + // no comment tokens found for injection. Use doc tokens instead + doc_default_tokens.and_then(|tokens| { + tokens + .iter() + .filter(|token| line.slice(start..).starts_with(token)) + .max_by_key(|token| token.len()) + .cloned() + }), + ) +} + +pub fn get_injected_tokens( + syntax: Option<&Syntax>, + start: usize, + end: usize, +) -> (Option<Vec<String>>, Option<Vec<BlockCommentToken>>) { + // Find the injection with the most tightly encompassing range. 
+ syntax .and_then(|syntax| { injection_for_range(syntax, start, end) .map(|language_id| syntax.layer_config(language_id)) .map(|config| { ( config.comment_tokens.clone(), config.block_comment_tokens.clone(), ) }) }) .unwrap_or_default() } /// For a given range in the document, get the most tightly encompassing @@ -58,26 +95,6 @@ pub fn injection_for_range(syntax: &Syntax, from: usize, to: usize) -> Option<La }) } -pub fn get_injected_tokens( - syntax: Option<&Syntax>, - start: usize, - end: usize, -) -> (Option<Vec<String>>, Option<Vec<BlockCommentToken>>) { - // Find the injection with the most tightly encompassing range. - syntax - .and_then(|syntax| { - injection_for_range(syntax, start, end) - .map(|language_id| syntax.layer_config(language_id)) - .map(|config| { - ( - config.comment_tokens.clone(), - config.block_comment_tokens.clone(), - ) - }) - }) - .unwrap_or_default() -} - /// Given text, a comment token, and a set of line indices, returns the following: /// - Whether the given lines should be considered commented /// - If any of the lines are uncommented, all lines are considered as such. 
@@ -599,10 +616,10 @@ mod test { #[test] fn test_get_comment_with_char_boundaries() { let rope = Rope::from("··"); - let tokens = ["//", "///"]; + let tokens = vec!["//".to_owned(), "///".to_owned()]; assert_eq!( - super::get_comment_token(rope.slice(..), tokens.as_slice(), 0), + super::get_comment_token(None, rope.slice(..), Some(&tokens), 0), None ); } @@ -614,11 +631,11 @@ mod test { #[test] fn test_use_longest_comment() { let text = Rope::from(" /// amogus ඞ"); - let tokens = ["///", "//"]; + let tokens = vec!["///".to_owned(), "//".to_owned()]; assert_eq!( - super::get_comment_token(text.slice(..), tokens.as_slice(), 0), - Some("///") + super::get_comment_token(None, text.slice(..), Some(&tokens), 0), + Some("///".to_owned()) ); } } diff --git a/helix-term/src/commands.rs b/helix-term/src/commands.rs index e12c80b65..753a42743 100644 --- a/helix-term/src/commands.rs +++ b/helix-term/src/commands.rs @@ -3623,13 +3623,14 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation) let mut ranges = SmallVec::with_capacity(selection.len()); - let continue_comment_tokens = - if comment_continuation == CommentContinuation::Enabled && config.continue_comments { - doc.language_config() - .and_then(|config| config.comment_tokens.as_ref()) - } else { - None - }; + let continue_comments = + comment_continuation == CommentContinuation::Enabled && config.continue_comments; + + let doc_default_tokens = doc + .language_config() + .and_then(|config| config.comment_tokens.as_ref()); + + let syntax = doc.syntax(); let mut transaction = Transaction::change_by_selection(contents, selection, |range| { // the line number, where the cursor is currently @@ -3646,8 +3647,9 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation) let above_next_new_line_num = next_new_line_num.saturating_sub(1); - let continue_comment_token = continue_comment_tokens - .and_then(|tokens| comment::get_comment_token(text, tokens, curr_line_num)); + let 
continue_comment_token = + comment::get_comment_token(syntax, text, doc_default_tokens, curr_line_num) + .filter(|_| continue_comments); // Index to insert newlines after, as well as the char width // to use to compensate for those inserted newlines. @@ -3681,7 +3683,7 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation) if open == Open::Above && next_new_line_num == 0 { text.push_str(&indent); - if let Some(token) = continue_comment_token { + if let Some(ref token) = continue_comment_token { text.push_str(token); text.push(' '); } @@ -3690,7 +3692,7 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation) text.push_str(doc.line_ending.as_str()); text.push_str(&indent); - if let Some(token) = continue_comment_token { + if let Some(ref token) = continue_comment_token { text.push_str(token); text.push(' '); } @@ -4139,12 +4141,11 @@ pub mod insert { let mut global_offs = 0; let mut new_text = String::new(); - let continue_comment_tokens = if config.continue_comments { - doc.language_config() - .and_then(|config| config.comment_tokens.as_ref()) - } else { - None - }; + let doc_default_comment_token = doc + .language_config() + .and_then(|config| config.comment_tokens.as_ref()); + + let syntax = doc.syntax(); let mut transaction = Transaction::change_by_selection(contents, selection, |range| { // Tracks the number of trailing whitespace characters deleted by this selection. 
@@ -4161,8 +4162,9 @@ pub mod insert { let current_line = text.char_to_line(pos); let line_start = text.line_to_char(current_line); - let continue_comment_token = continue_comment_tokens - .and_then(|tokens| comment::get_comment_token(text, tokens, current_line)); + let continue_comment_token = + comment::get_comment_token(syntax, text, doc_default_comment_token, current_line) + .filter(|_| config.continue_comments); let (from, to, local_offs) = if let Some(idx) = text.slice(line_start..pos).last_non_whitespace_char() @@ -4197,7 +4199,7 @@ pub mod insert { new_text.reserve_exact(line_ending.len() + indent.len() + token.len() + 1); new_text.push_str(line_ending); new_text.push_str(&indent); - new_text.push_str(token); + new_text.push_str(&token); new_text.push(' '); new_text.chars().count() } else if on_auto_pair {