refactor: Rename variables + Remove accidentally committed files

pull/12759/head
Nik Revenco 2025-05-25 00:46:07 +01:00
parent d6d7a3c9e2
commit 76687f5389
4 changed files with 84 additions and 131 deletions

View File

@ -12,8 +12,8 @@ use std::borrow::Cow;
pub const DEFAULT_COMMENT_TOKEN: &str = "#"; pub const DEFAULT_COMMENT_TOKEN: &str = "#";
/// Returns the longest matching comment token of the given line (if it exists). /// Returns the longest matching line comment token of the given line (if it exists).
pub fn get_comment_token( pub fn get_line_comment_token(
loader: &syntax::Loader, loader: &syntax::Loader,
syntax: Option<&Syntax>, syntax: Option<&Syntax>,
text: RopeSlice, text: RopeSlice,
@ -24,8 +24,8 @@ pub fn get_comment_token(
let start = line.first_non_whitespace_char()?; let start = line.first_non_whitespace_char()?;
let start_char = text.line_to_char(line_num) + start; let start_char = text.line_to_char(line_num) + start;
let injected_tokens = get_injected_tokens(loader, syntax, start_char as u32, start_char as u32) let injected_line_comment_tokens =
// we only care about line comment tokens injected_tokens_for_range(loader, syntax, start_char as u32, start_char as u32)
.0 .0
.and_then(|tokens| { .and_then(|tokens| {
tokens tokens
@ -34,20 +34,22 @@ pub fn get_comment_token(
.max_by_key(|token| token.len()) .max_by_key(|token| token.len())
}); });
injected_tokens.or( injected_line_comment_tokens.or_else(||
// no comment tokens found for injection, use doc comments if exists // no line comment tokens found for injection, use doc comments if exists
doc_default_tokens.and_then(|tokens| { doc_default_tokens.and_then(|tokens| {
tokens tokens
.iter() .iter()
.filter(|token| line.slice(start..).starts_with(token)) .filter(|token| line.slice(start..).starts_with(token))
.max_by_key(|token| token.len()) .max_by_key(|token| token.len())
.cloned() .cloned()
}), }))
)
} }
/// Find the injection with the most tightly encompassing range. /// Get the injected line and block comment of the smallest
pub fn get_injected_tokens( /// injection around the range which fully includes `start..=end`.
///
/// Injections that do not have any comment tokens are skipped.
pub fn injected_tokens_for_range(
loader: &syntax::Loader, loader: &syntax::Loader,
syntax: Option<&Syntax>, syntax: Option<&Syntax>,
start: u32, start: u32,
@ -69,8 +71,8 @@ pub fn get_injected_tokens(
// if the language does not have any comment tokens, it does not make // if the language does not have any comment tokens, it does not make
// any sense to consider it. // any sense to consider it.
// //
// This includes languages such as comment, jsdoc and regex: These // This includes languages such as `comment`, `jsdoc` and `regex`.
// languages are injected and never found in files by themselves // These languages are injected and never found in files by themselves
has_any_comment_tokens.then_some(( has_any_comment_tokens.then_some((
lang_config.comment_tokens.clone(), lang_config.comment_tokens.clone(),
lang_config.block_comment_tokens.clone(), lang_config.block_comment_tokens.clone(),
@ -80,19 +82,20 @@ pub fn get_injected_tokens(
.unwrap_or_default() .unwrap_or_default()
} }
/// Given text, a comment token, and a set of line indices, returns the following: /// Given `text`, a comment `token`, and a set of line indices `lines_to_modify`,
/// - Whether the given lines should be considered commented /// Returns the following:
/// 1. Whether the given lines should be considered commented
/// - If any of the lines are uncommented, all lines are considered as such. /// - If any of the lines are uncommented, all lines are considered as such.
/// - The lines to change for toggling comments /// 2. The lines to change for toggling comments
/// - This is all provided lines excluding blanks lines. /// - This is all provided lines excluding blanks lines.
/// - The column of the comment tokens /// 3. The column of the comment tokens
/// - Column of existing tokens, if the lines are commented; column to place tokens at otherwise. /// - Column of existing tokens, if the lines are commented; column to place tokens at otherwise.
/// - The margin to the right of the comment tokens /// 4. The margin to the right of the comment tokens
/// - Defaults to `1`. If any existing comment token is not followed by a space, changes to `0`. /// - Defaults to `1`. If any existing comment token is not followed by a space, changes to `0`.
fn find_line_comment( fn find_line_comment(
token: &str, token: &str,
text: RopeSlice, text: RopeSlice,
lines: impl IntoIterator<Item = usize>, lines_to_modify: impl IntoIterator<Item = usize>,
) -> (bool, Vec<usize>, usize, usize) { ) -> (bool, Vec<usize>, usize, usize) {
let mut commented = true; let mut commented = true;
let mut to_change = Vec::new(); let mut to_change = Vec::new();
@ -100,7 +103,7 @@ fn find_line_comment(
let mut margin = 1; let mut margin = 1;
let token_len = token.chars().count(); let token_len = token.chars().count();
for line in lines { for line in lines_to_modify {
let line_slice = text.line(line); let line_slice = text.line(line);
if let Some(pos) = line_slice.first_non_whitespace_char() { if let Some(pos) = line_slice.first_non_whitespace_char() {
let len = line_slice.len_chars(); let len = line_slice.len_chars();
@ -130,39 +133,55 @@ fn find_line_comment(
(commented, to_change, min, margin) (commented, to_change, min, margin)
} }
/// Returns the edits required to toggle the comment `token` for the `range` in the `doc`
#[must_use] #[must_use]
pub fn toggle_line_comments(doc: &Rope, range: &Range, token: Option<&str>) -> Vec<Change> { pub fn toggle_line_comments(doc: &Rope, range: &Range, token: Option<&str>) -> Vec<Change> {
let text = doc.slice(..); let text = doc.slice(..);
let token = token.unwrap_or(DEFAULT_COMMENT_TOKEN); let token = token.unwrap_or(DEFAULT_COMMENT_TOKEN);
// Add a space between the comment token and the line.
let comment = Tendril::from(format!("{} ", token)); let comment = Tendril::from(format!("{} ", token));
let start = text.char_to_line(range.from());
let end = text.char_to_line(range.to().saturating_sub(1));
let line_count = text.len_lines(); let line_count = text.len_lines();
let start = start.clamp(0, line_count);
let end = (end + 1).min(line_count);
let mut lines = vec![]; let start = text.char_to_line(range.from()).clamp(0, line_count);
lines.extend(start..end); let end = (text.char_to_line(range.to().saturating_sub(1)) + 1).min(line_count);
let (was_commented, to_change, min, margin) = find_line_comment(token, text, lines); let lines_to_modify = start..end;
let mut changes: Vec<Change> = Vec::with_capacity(to_change.len()); let (
was_commented,
lines_to_modify,
column_to_place_comment_tokens_at,
comment_tokens_right_margin,
) = find_line_comment(token, text, lines_to_modify);
for line in to_change { lines_to_modify
let pos = text.line_to_char(line) + min; .into_iter()
.map(|line| {
let place_comment_tokens_at =
text.line_to_char(line) + column_to_place_comment_tokens_at;
if !was_commented { if !was_commented {
// comment line // comment line
changes.push((pos, pos, Some(comment.clone()))); (
place_comment_tokens_at,
place_comment_tokens_at,
// insert the token
Some(comment.clone()),
)
} else { } else {
// uncomment line // uncomment line
changes.push((pos, pos + token.len() + margin, None)); (
place_comment_tokens_at,
place_comment_tokens_at + token.len() + comment_tokens_right_margin,
// remove the token - replace range with nothing
None,
)
} }
} })
.collect()
changes
} }
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
@ -582,33 +601,5 @@ mod test {
transaction.apply(&mut doc); transaction.apply(&mut doc);
assert_eq!(doc, ""); assert_eq!(doc, "");
} }
// Test that `get_comment_tokens` works even if the content of the file includes chars
// whose byte size is unequal to the number of chars
// #[test]
// fn test_get_comment_with_char_boundaries() {
// let rope = Rope::from("··");
// let tokens = vec!["//".to_owned(), "///".to_owned()];
// assert_eq!(
// super::get_comment_token(None, rope.slice(..), Some(&tokens), 0),
// None
// );
// }
// /// Test for `get_comment_token`.
// ///
// /// Assuming the comment tokens are stored as `["///", "//"]`, `get_comment_token` should still
// /// return `///` instead of `//` if the user is in a doc-comment section.
// #[test]
// fn test_use_longest_comment() {
// let text = Rope::from(" /// amogus ඞ");
// let tokens = vec!["///".to_owned(), "//".to_owned()];
// assert_eq!(
// super::get_comment_token(None, text.slice(..), Some(&tokens), 0),
// Some("///".to_owned())
// );
// }
} }
} }

View File

@ -3671,8 +3671,13 @@ fn open(cx: &mut Context, open: Open, comment_continuation: CommentContinuation)
let above_next_new_line_num = next_new_line_num.saturating_sub(1); let above_next_new_line_num = next_new_line_num.saturating_sub(1);
let continue_comment_token = let continue_comment_token = comment::get_line_comment_token(
comment::get_comment_token(&loader, syntax, text, doc_default_tokens, curr_line_num) &loader,
syntax,
text,
doc_default_tokens,
curr_line_num,
)
.filter(|_| continue_comments); .filter(|_| continue_comments);
// Index to insert newlines after, as well as the char width // Index to insert newlines after, as well as the char width
@ -4225,7 +4230,7 @@ pub mod insert {
let current_line = text.char_to_line(pos); let current_line = text.char_to_line(pos);
let line_start = text.line_to_char(current_line); let line_start = text.line_to_char(current_line);
let continue_comment_token = comment::get_comment_token( let continue_comment_token = comment::get_line_comment_token(
&doc.syn_loader.load(), &doc.syn_loader.load(),
syntax, syntax,
text, text,
@ -5164,12 +5169,13 @@ pub fn completion(cx: &mut Context) {
// comments // comments
/// commenting behavior, for each range in selection: /// Commenting behavior, for each range in selection:
/// 1. only line comment tokens -> line comment ///
/// 2. each line block commented -> uncomment all lines /// 1. Only line comment tokens -> line comment
/// 3. whole selection block commented -> uncomment selection /// 2. Each line block commented -> uncomment all lines
/// 4. all lines not commented and block tokens -> comment uncommented lines /// 3. Whole selection block commented -> uncomment selection
/// 5. no comment tokens and not block commented -> line comment /// 4. All lines not commented and block tokens -> comment uncommented lines
/// 5. No comment tokens and not block commented -> line comment
fn toggle_comments_impl<F>(cx: &mut Context, comments_transaction: F) fn toggle_comments_impl<F>(cx: &mut Context, comments_transaction: F)
where where
F: Fn( F: Fn(
@ -5219,7 +5225,7 @@ fn toggle_comments(cx: &mut Context) {
rope, rope,
selection.iter().flat_map(|range| { selection.iter().flat_map(|range| {
let (injected_line_tokens, injected_block_tokens) = let (injected_line_tokens, injected_block_tokens) =
comment::get_injected_tokens( comment::injected_tokens_for_range(
loader, loader,
syntax, syntax,
range.from() as u32, range.from() as u32,
@ -5301,7 +5307,7 @@ fn toggle_line_comments(cx: &mut Context) {
rope, rope,
selection.iter().flat_map(|range| { selection.iter().flat_map(|range| {
let (injected_line_tokens, injected_block_tokens) = let (injected_line_tokens, injected_block_tokens) =
comment::get_injected_tokens( comment::injected_tokens_for_range(
loader, loader,
syntax, syntax,
range.from() as u32, range.from() as u32,
@ -5355,7 +5361,7 @@ fn toggle_block_comments(cx: &mut Context) {
rope, rope,
selection.iter().flat_map(|range| { selection.iter().flat_map(|range| {
let (injected_line_tokens, injected_block_tokens) = let (injected_line_tokens, injected_block_tokens) =
comment::get_injected_tokens( comment::injected_tokens_for_range(
loader, loader,
syntax, syntax,
range.from() as u32, range.from() as u32,

View File

@ -1,5 +0,0 @@
<!-- <p>Comment toggle #[|on this line ]#should use the HTML comment token(s).</p> -->
<script type="text/javascript">
// Comment toggle #(|on this line )#should use the javascript comment token(s).
foo();
</script>

View File

@ -1,39 +0,0 @@
use crate::DynError;
/// Parse every tree-sitter query file of every configured language and
/// return an error describing the first query that fails to compile
/// against its grammar.
///
/// Languages whose grammar cannot be loaded locally are skipped, as are
/// query files that are missing or empty for a given language.
///
/// # Errors
///
/// Returns a `DynError` naming the query file, the language, and the
/// parse failure reason for the first invalid query encountered.
pub fn query_check() -> Result<(), DynError> {
    use crate::helpers::lang_config;
    use helix_core::{syntax::read_query, tree_sitter::Query};
    use helix_loader::grammar::get_language;

    // The set of query files a language may provide.
    let query_files = [
        "highlights.scm",
        "locals.scm",
        "injections.scm",
        "textobjects.scm",
        "indents.scm",
    ];

    for language in lang_config().language {
        let language_name = &language.language_name;
        let grammar_name = language.grammar.as_ref().unwrap_or(language_name);

        // Load the grammar once per language rather than once per query
        // file (it only depends on `grammar_name`). A grammar that is not
        // available locally is not an error: its checks are skipped,
        // matching the original behavior.
        let grammar = match get_language(grammar_name) {
            Ok(grammar) => grammar,
            Err(_) => continue,
        };

        for query_file in query_files {
            let query_text = read_query(language_name, query_file);
            if query_text.is_empty() {
                continue;
            }
            if let Err(reason) = Query::new(&grammar, &query_text) {
                return Err(format!(
                    "Failed to parse {} queries for {}: {}",
                    query_file, language_name, reason
                )
                .into());
            }
        }
    }

    println!("Query check succeeded");
    Ok(())
}