it works! (ish)

pull/13870/head
Gareth Widlansky 2025-06-30 10:45:25 -07:00
parent 194a00fbfc
commit 4ad4ff1384
1 changed file with 104 additions and 283 deletions
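In outline, the diff below makes global_search keep each hit's line text and wires the results into an editable scratch document: hits are grouped per file in a `matches: HashMap<PathBuf, Vec<(usize, String)>>`, their text is concatenated into the scratch buffer, and a `line_map: HashMap<(PathBuf, usize), usize>` records which buffer row each (file, line) pair landed on, so that `global_refactor` can later diff the edited buffer against the stored text and write the changes back. A rough, std-only sketch of that round trip (the `Hit` struct and every name in it are illustrative stand-ins for the commit's `FileResult`, not code from the repository):

    use std::collections::HashMap;
    use std::path::PathBuf;

    // One grep hit: file, 0-indexed line number, and the line's text.
    // Stand-in for the `FileResult` fields this commit adds.
    struct Hit {
        path: PathBuf,
        line_num: usize,
        line_content: String,
    }

    fn main() {
        // Pretend the search produced these hits.
        let hits = vec![
            Hit { path: "a.rs".into(), line_num: 3, line_content: "let foo = 1;".into() },
            Hit { path: "b.rs".into(), line_num: 7, line_content: "print(foo);".into() },
        ];

        // Group hits per file, as the new `with_quickfix` handler does.
        let mut matches: HashMap<PathBuf, Vec<(usize, String)>> = HashMap::new();
        for h in hits {
            matches.entry(h.path).or_default().push((h.line_num, h.line_content));
        }

        // Concatenate all matched lines into one scratch buffer, remembering
        // which buffer row each (file, source line) pair became.
        let mut buffer = String::new();
        let mut line_map: HashMap<(PathBuf, usize), usize> = HashMap::new();
        let mut row = 0;
        for (path, lines) in &matches {
            for (line, text) in lines {
                buffer.push_str(text);
                buffer.push('\n');
                line_map.insert((path.clone(), *line), row);
                row += 1;
            }
        }

        // ... the user edits `buffer` in place ...
        let edited: Vec<&str> = buffer.lines().collect();

        // On refactor: compare each stored line with its edited buffer row and
        // collect the replacements that need to be written back per file.
        for (path, lines) in &matches {
            for (line, text) in lines {
                let row = line_map[&(path.clone(), *line)];
                let replace = edited.get(row).copied().unwrap_or("");
                if text.as_str() != replace {
                    println!("{}:{} -> {:?}", path.display(), line, replace);
                }
            }
        }
    }

In the actual commit the scratch buffer is a `Rope`, and the write-back goes through a `Transaction::change` per affected document instead of printing.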


@@ -379,6 +379,7 @@ impl MappableCommand {
         search_selection_detect_word_boundaries, "Use current selection as the search pattern, automatically wrapping with `\\b` on word boundaries",
         make_search_word_bounded, "Modify current search to make it word bounded",
         global_search, "Global search in workspace folder",
+        global_refactor, "Global refactoring in workspace folder",
         extend_line, "Select current line, if already selected, extend to another line based on the anchor",
         extend_line_below, "Select current line, if already selected, extend to next line",
         extend_line_above, "Select current line, if already selected, extend to previous line",
@@ -2445,13 +2446,15 @@ fn global_search(cx: &mut Context) {
        path: PathBuf,
        /// 0 indexed lines
        line_num: usize,
+       line_content: String,
    }

    impl FileResult {
-       fn new(path: &Path, line_num: usize) -> Self {
+       fn new(path: &Path, line_num: usize, line_content: String) -> Self {
            Self {
                path: path.to_path_buf(),
                line_num,
+               line_content: line_content.into(),
            }
        }
    }
@@ -2576,9 +2579,13 @@ fn global_search(cx: &mut Context) {
            };
            let mut stop = false;
-           let sink = sinks::UTF8(|line_num, _line_content| {
+           let sink = sinks::UTF8(|line_num, line_content| {
                stop = injector
-                   .push(FileResult::new(entry.path(), line_num as usize - 1))
+                   .push(FileResult::new(
+                       entry.path(),
+                       line_num as usize - 1,
+                       line_content.into(),
+                   ))
                    .is_err();
                Ok(!stop)
@@ -2664,17 +2671,43 @@ fn global_search(cx: &mut Context) {
            Some((path.as_path().into(), Some((*line_num, *line_num))))
        })
        .with_quickfix(move |cx, results: Vec<&FileResult>| {
-           let quickfix_line = results
-               .iter()
-               .map(|FileResult { path, line_num, .. }| format!("{}:{}", path.display(), line_num))
-               .collect::<Vec<_>>()
-               .join(" ");
-           log::info!("Quickfix entries: {}", quickfix_line);
-           cx.editor
-               .set_status(format!("Quickfix entries: {}", quickfix_line));
-           // cx.editor
-           //     .set_error(format!("Quickfix entries: {}", quickfix_line));
+           if results.is_empty() {
+               cx.editor.set_status("No matches found");
+               return;
+           }
+
+           let mut matches: HashMap<PathBuf, Vec<(usize, String)>> = HashMap::new();
+
+           for result in results {
+               let path = result.path.clone();
+               let line = result.line_num;
+               let text = result.line_content.clone();
+               matches.entry(path).or_default().push((line, text));
+           }
+           let mut doc_text = Rope::new();
+           let mut line_map = HashMap::new();
+           let mut count = 0;
+           for (key, value) in &matches {
+               for (line, text) in value {
+                   doc_text.insert(doc_text.len_chars(), text);
+                   line_map.insert((key.clone(), *line), count);
+                   count += 1;
+               }
+           }
+           doc_text.split_off(doc_text.len_chars().saturating_sub(1));
+           let doc = Document::refactor(
+               doc_text,
+               matches,
+               line_map,
+               // TODO: actually learn how to detect encoding
+               None,
+               cx.editor.config.clone(),
+               cx.editor.syn_loader.clone(),
+           );
+           cx.editor.new_file_from_document(Action::Replace, doc);
        })
        .with_history_register(Some(reg))
        .with_dynamic_query(get_files, Some(275));
@@ -2682,277 +2715,65 @@ fn global_search(cx: &mut Context) {
    cx.push_layer(Box::new(overlaid(picker)));
}

-// TODO make this worky again
-// fn global_refactor(cx: &mut Context) {
-//     let document_type = doc!(cx.editor).document_type.clone();
-//     match &document_type {
-//         helix_view::document::DocumentType::File => {
-//             let (all_matches_sx, all_matches_rx) =
-//                 tokio::sync::mpsc::unbounded_channel::<(PathBuf, usize, String)>();
-//             let config = cx.editor.config();
-//             let smart_case = config.search.smart_case;
-//             let file_picker_config = config.file_picker.clone();
-
-//             let reg = cx.register.unwrap_or('/');
-
-//             // Restrict to current file type if possible
-//             let file_extension = doc!(cx.editor).path().and_then(|f| f.extension());
-//             let file_glob = if let Some(file_glob) = file_extension.and_then(|f| f.to_str()) {
-//                 let mut tb = ignore::types::TypesBuilder::new();
-//                 tb.add("p", &(String::from("*.") + file_glob))
-//                     .ok()
-//                     .and_then(|_| {
-//                         tb.select("all");
-//                         tb.build().ok()
-//                     })
-//             } else {
-//                 None
-//             };
-
-//             let encoding = Some(doc!(cx.editor).encoding());
-
-//             let completions = search_completions(cx, Some(reg));
-//             ui::regex_prompt(
-//                 cx,
-//                 "global-refactor:".into(),
-//                 Some(reg),
-//                 move |_editor: &Editor, input: &str| {
-//                     completions
-//                         .iter()
-//                         .filter(|comp| comp.starts_with(input))
-//                         .map(|comp| (0.., comp.clone().into()))
-//                         .collect()
-//                 },
-//                 move |editor, regex, event| {
-//                     if event != PromptEvent::Validate {
-//                         return;
-//                     }
-
-//                     if let Ok(matcher) = RegexMatcherBuilder::new()
-//                         .case_smart(smart_case)
-//                         .build(regex.into())
-//                     {
-//                         let searcher = SearcherBuilder::new()
-//                             .binary_detection(BinaryDetection::quit(b'\x00'))
-//                             .build(query);
-//
-//                         let mut checked = HashSet::<PathBuf>::new();
-//                         let file_extension = editor.documents
-//                             [&editor.tree.get(editor.tree.focus).doc]
-//                             .path()
-//                             .and_then(|f| f.extension());
-//                         for doc in editor.documents() {
-//                             searcher
-//                                 .clone()
-//                                 .search_slice(
-//                                     matcher.clone(),
-//                                     doc.text().to_string().as_bytes(),
-//                                     sinks::UTF8(|line_num, matched| {
-//                                         if let Some(path) = doc.path() {
-//                                             if let Some(extension) = path.extension() {
-//                                                 if let Some(file_extension) = file_extension {
-//                                                     if file_extension == extension {
-//                                                         all_matches_sx
-//                                                             .send((
-//                                                                 path.clone(),
-//                                                                 line_num as usize - 1,
-//                                                                 String::from(
-//                                                                     matched
-//                                                                         .strip_suffix("\r\n")
-//                                                                         .or_else(|| {
-//                                                                             matched
-//                                                                                 .strip_suffix('\n')
-//                                                                         })
-//                                                                         .unwrap_or(matched),
-//                                                                 ),
-//                                                             ))
-//                                                             .unwrap();
-//                                                     }
-//                                                 }
-//                                             }
-//                                             // Exclude from file search
-//                                             checked.insert(path.clone());
-//                                         }
-//                                         Ok(true)
-//                                     }),
-//                                 )
-//                                 .ok();
-//                         }
-//                         let search_root = std::env::current_dir()
-//                             .expect("Global search error: Failed to get current dir");
-//                         let mut wb = WalkBuilder::new(search_root);
-//                         wb.hidden(file_picker_config.hidden)
-//                             .parents(file_picker_config.parents)
-//                             .ignore(file_picker_config.ignore)
-//                             .git_ignore(file_picker_config.git_ignore)
-//                             .git_global(file_picker_config.git_global)
-//                             .git_exclude(file_picker_config.git_exclude)
-//                             .max_depth(file_picker_config.max_depth);
-//                         if let Some(file_glob) = &file_glob {
-//                             wb.types(file_glob.clone());
-//                         }
-//                         wb.build_parallel().run(|| {
-//                             let mut searcher = searcher.clone();
-//                             let matcher = matcher.clone();
-//                             let all_matches_sx = all_matches_sx.clone();
-//                             let checked = checked.clone();
-//                             Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
-//                                 let entry = match entry {
-//                                     Ok(entry) => entry,
-//                                     Err(_) => return WalkState::Continue,
-//                                 };
-//                                 match entry.file_type() {
-//                                     Some(entry) if entry.is_file() => {}
-//                                     // skip everything else
-//                                     _ => return WalkState::Continue,
-//                                 };
-//                                 let result = searcher.search_path(
-//                                     &matcher,
-//                                     entry.path(),
-//                                     sinks::UTF8(|line_num, matched| {
-//                                         let path = entry.clone().into_path();
-//                                         if !checked.contains(&path) {
-//                                             all_matches_sx
-//                                                 .send((
-//                                                     path,
-//                                                     line_num as usize - 1,
-//                                                     String::from(
-//                                                         matched
-//                                                             .strip_suffix("\r\n")
-//                                                             .or_else(|| matched.strip_suffix('\n'))
-//                                                             .unwrap_or(matched),
-//                                                     ),
-//                                                 ))
-//                                                 .unwrap();
-//                                         }
-//                                         Ok(true)
-//                                     }),
-//                                 );
-//                                 if let Err(err) = result {
-//                                     log::error!(
-//                                         "Global search error: {}, {}",
-//                                         entry.path().display(),
-//                                         err
-//                                     );
-//                                 }
-//                                 WalkState::Continue
-//                             })
-//                         });
-//                     }
-//                 },
-//             );
-//             let show_refactor = async move {
-//                 let all_matches: Vec<(PathBuf, usize, String)> =
-//                     UnboundedReceiverStream::new(all_matches_rx).collect().await;
-//                 let call: job::Callback = Callback::Editor(Box::new(move |editor: &mut Editor| {
-//                     if all_matches.is_empty() {
-//                         editor.set_status("No matches found");
-//                         return;
-//                     }
-//                     let mut matches: HashMap<PathBuf, Vec<(usize, String)>> = HashMap::new();
-//                     for (path, line, text) in all_matches {
-//                         if let Some(vec) = matches.get_mut(&path) {
-//                             vec.push((line, text));
-//                         } else {
-//                             let v = Vec::from([(line, text)]);
-//                             matches.insert(path, v);
-//                         }
-//                     }
-//                     let language_id = doc!(editor).language_id().map(String::from);
-//                     let mut doc_text = Rope::new();
-//                     let mut line_map = HashMap::new();
-//                     let mut count = 0;
-//                     for (key, value) in &matches {
-//                         for (line, text) in value {
-//                             doc_text.insert(doc_text.len_chars(), text);
-//                             doc_text.insert(doc_text.len_chars(), "\n");
-//                             line_map.insert((key.clone(), *line), count);
-//                             count += 1;
-//                         }
-//                     }
-//                     doc_text.split_off(doc_text.len_chars().saturating_sub(1));
-//                     let mut doc = Document::refactor(
-//                         doc_text,
-//                         matches,
-//                         line_map,
-//                         // TODO: actually learn how to detect encoding
-//                         None,
-//                         editor.config.clone(),
-//                         editor.syn_loader.clone(),
-//                     );
-//                     // if let Some(language_id) = language_id {
-//                     //     doc.set_language_by_language_id(&language_id, editor.syn_loader.clone())
-//                     //         .ok();
-//                     // };
-//                     editor.new_file_from_document(Action::Replace, doc);
-//                 }));
-//                 Ok(call)
-//             };
-//             cx.jobs.callback(show_refactor);
-//         }
-//         helix_view::document::DocumentType::Refactor { matches, line_map } => {
-//             let refactor_id = doc!(cx.editor).id();
-//             let replace_text = doc!(cx.editor).text().clone();
-//             let view = view!(cx.editor).clone();
-//             let mut documents: usize = 0;
-//             let mut count: usize = 0;
-//             for (key, value) in matches {
-//                 let mut changes = Vec::<(usize, usize, String)>::new();
-//                 for (line, text) in value {
-//                     if let Some(re_line) = line_map.get(&(key.clone(), *line)) {
-//                         let mut replace = replace_text
-//                             .get_line(*re_line)
-//                             .unwrap_or_else(|| "\n".into())
-//                             .to_string()
-//                             .clone();
-//                         replace = replace.strip_suffix('\n').unwrap_or(&replace).to_string();
-//                         if text != &replace {
-//                             changes.push((*line, text.chars().count(), replace));
-//                         }
-//                     }
-//                 }
-//                 if !changes.is_empty() {
-//                     if let Some(doc) = cx
-//                         .editor
-//                         .open(key, Action::Load)
-//                         .ok()
-//                         .and_then(|id| cx.editor.document_mut(id))
-//                     {
-//                         documents += 1;
-//                         let mut applychanges = Vec::<(usize, usize, Option<Tendril>)>::new();
-//                         for (line, length, text) in changes {
-//                             if doc.text().len_lines() > line {
-//                                 let start = doc.text().line_to_char(line);
-//                                 applychanges.push((
-//                                     start,
-//                                     start + length,
-//                                     Some(Tendril::from(text.to_string())),
-//                                 ));
-//                                 count += 1;
-//                             }
-//                         }
-//                         let transaction = Transaction::change(doc.text(), applychanges.into_iter());
-//                         doc.apply(&transaction, view.id);
-//                     }
-//                 }
-//             }
-//             cx.editor.set_status(format!(
-//                 "Refactored {} documents, {} lines changed.",
-//                 documents, count
-//             ));
-//             cx.editor.close_document(refactor_id, true).ok();
-//         }
-//     }
-// }
+fn global_refactor(cx: &mut Context) {
+    let document_type = doc!(cx.editor).document_type.clone();
+
+    match &document_type {
+        helix_view::document::DocumentType::File => return,
+        helix_view::document::DocumentType::Refactor { matches, line_map } => {
+            let refactor_id = doc!(cx.editor).id();
+            let replace_text = doc!(cx.editor).text().clone();
+            let view = view!(cx.editor).clone();
+            let mut documents: usize = 0;
+            let mut count: usize = 0;
+            for (key, value) in matches {
+                let mut changes = Vec::<(usize, usize, String)>::new();
+                for (line, text) in value {
+                    if let Some(re_line) = line_map.get(&(key.clone(), *line)) {
+                        let mut replace = replace_text
+                            .get_line(*re_line)
+                            .unwrap_or_else(|| "\n".into())
+                            .to_string()
+                            .clone();
+                        replace = replace.strip_suffix('\n').unwrap_or(&replace).to_string();
+                        if text != &replace {
+                            changes.push((*line, text.chars().count(), replace));
+                        }
+                    }
+                }
+                if !changes.is_empty() {
+                    if let Some(doc) = cx
+                        .editor
+                        .open(key, Action::Load)
+                        .ok()
+                        .and_then(|id| cx.editor.document_mut(id))
+                    {
+                        documents += 1;
+                        let mut applychanges = Vec::<(usize, usize, Option<Tendril>)>::new();
+                        for (line, length, text) in changes {
+                            if doc.text().len_lines() > line {
+                                let start = doc.text().line_to_char(line);
+                                applychanges.push((
+                                    start,
+                                    start + length,
+                                    Some(Tendril::from(text.to_string())),
+                                ));
+                                count += 1;
+                            }
+                        }
+                        let transaction = Transaction::change(doc.text(), applychanges.into_iter());
+                        doc.apply(&transaction, view.id);
+                    }
+                }
+            }
+            cx.editor.set_status(format!(
+                "Refactored {} documents, {} lines changed.",
+                documents, count
+            ));
+            cx.editor.close_document(refactor_id, true).ok();
+        }
+    }
+}

 enum Extend {
     Above,