mirror of https://github.com/helix-editor/helix
feat: WIP quickfix keybind
parent 810dc46010
commit 194a00fbfc
@@ -52,6 +52,7 @@ use helix_view::{
    view::View,
    Document, DocumentId, Editor, ViewId,
};
use tokio_stream::wrappers::UnboundedReceiverStream;

use anyhow::{anyhow, bail, ensure, Context as _};
use insert::*;
@@ -378,7 +379,6 @@ impl MappableCommand {
        search_selection_detect_word_boundaries, "Use current selection as the search pattern, automatically wrapping with `\\b` on word boundaries",
        make_search_word_bounded, "Modify current search to make it word bounded",
        global_search, "Global search in workspace folder",
        global_refactor, "Global refactoring in workspace folder",
        extend_line, "Select current line, if already selected, extend to another line based on the anchor",
        extend_line_below, "Select current line, if already selected, extend to next line",
        extend_line_above, "Select current line, if already selected, extend to previous line",
@@ -2663,280 +2663,296 @@ fn global_search(cx: &mut Context) {
    .with_preview(|_editor, FileResult { path, line_num, .. }| {
        Some((path.as_path().into(), Some((*line_num, *line_num))))
    })
    .with_quickfix(move |cx, results: Vec<&FileResult>| {
        let quickfix_line = results
            .iter()
            .map(|FileResult { path, line_num, .. }| format!("{}:{}", path.display(), line_num))
            .collect::<Vec<_>>()
            .join(" ");

        log::info!("Quickfix entries: {}", quickfix_line);
        cx.editor
            .set_status(format!("Quickfix entries: {}", quickfix_line));
        // cx.editor
        //     .set_error(format!("Quickfix entries: {}", quickfix_line));
    })
    .with_history_register(Some(reg))
    .with_dynamic_query(get_files, Some(275));

    cx.push_layer(Box::new(overlaid(picker)));
}

fn global_refactor(cx: &mut Context) {
    let document_type = doc!(cx.editor).document_type.clone();
    // TODO make this worky again
    // fn global_refactor(cx: &mut Context) {
    //     let document_type = doc!(cx.editor).document_type.clone();

    match &document_type {
        helix_view::document::DocumentType::File => {
            let (all_matches_sx, all_matches_rx) =
                tokio::sync::mpsc::unbounded_channel::<(PathBuf, usize, String)>();
            let config = cx.editor.config();
            let smart_case = config.search.smart_case;
            let file_picker_config = config.file_picker.clone();
            // match &document_type {
            //     helix_view::document::DocumentType::File => {
            //         let (all_matches_sx, all_matches_rx) =
            //             tokio::sync::mpsc::unbounded_channel::<(PathBuf, usize, String)>();
            //         let config = cx.editor.config();
            //         let smart_case = config.search.smart_case;
            //         let file_picker_config = config.file_picker.clone();

            let reg = cx.register.unwrap_or('/');
            // let reg = cx.register.unwrap_or('/');

            // Restrict to current file type if possible
            let file_extension = doc!(cx.editor).path().and_then(|f| f.extension());
            let file_glob = if let Some(file_glob) = file_extension.and_then(|f| f.to_str()) {
                let mut tb = ignore::types::TypesBuilder::new();
                tb.add("p", &(String::from("*.") + file_glob))
                    .ok()
                    .and_then(|_| {
                        tb.select("all");
                        tb.build().ok()
                    })
            } else {
                None
            };
            // // Restrict to current file type if possible
            // let file_extension = doc!(cx.editor).path().and_then(|f| f.extension());
            // let file_glob = if let Some(file_glob) = file_extension.and_then(|f| f.to_str()) {
            //     let mut tb = ignore::types::TypesBuilder::new();
            //     tb.add("p", &(String::from("*.") + file_glob))
            //         .ok()
            //         .and_then(|_| {
            //             tb.select("all");
            //             tb.build().ok()
            //         })
            // } else {
            //     None
            // };

            let encoding = Some(doc!(cx.editor).encoding());
            // let encoding = Some(doc!(cx.editor).encoding());

            let completions = search_completions(cx, Some(reg));
            ui::regex_prompt(
                cx,
                "global-refactor:".into(),
                Some(reg),
                move |_editor: &Editor, input: &str| {
                    completions
                        .iter()
                        .filter(|comp| comp.starts_with(input))
                        .map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
                        .collect()
                },
                move |editor, regex, event| {
                    if event != PromptEvent::Validate {
                        return;
                    }
                    // let completions = search_completions(cx, Some(reg));
                    // ui::regex_prompt(
                    //     cx,
                    //     "global-refactor:".into(),
                    //     Some(reg),
                    //     move |_editor: &Editor, input: &str| {
                    //         completions
                    //             .iter()
                    //             .filter(|comp| comp.starts_with(input))
                    //             .map(|comp| (0.., comp.clone().into()))
                    //             .collect()
                    //     },
                    //     move |editor, regex, event| {
                    //         if event != PromptEvent::Validate {
                    //             return;
                    //         }

                    if let Ok(matcher) = RegexMatcherBuilder::new()
                        .case_smart(smart_case)
                        .build(regex.as_str())
                    {
                        let searcher = SearcherBuilder::new()
                            .binary_detection(BinaryDetection::quit(b'\x00'))
                            .build();
                        // if let Ok(matcher) = RegexMatcherBuilder::new()
                        //     .case_smart(smart_case)
                        //     .build(regex.into())
                        // {
                        //     let searcher = SearcherBuilder::new()
                        //         .binary_detection(BinaryDetection::quit(b'\x00'))
                        //         .build(query);

                        let mut checked = HashSet::<PathBuf>::new();
                        let file_extension = editor.documents
                            [&editor.tree.get(editor.tree.focus).doc]
                            .path()
                            .and_then(|f| f.extension());
                        for doc in editor.documents() {
                            searcher
                                .clone()
                                .search_slice(
                                    matcher.clone(),
                                    doc.text().to_string().as_bytes(),
                                    sinks::UTF8(|line_num, matched| {
                                        if let Some(path) = doc.path() {
                                            if let Some(extension) = path.extension() {
                                                if let Some(file_extension) = file_extension {
                                                    if file_extension == extension {
                                                        all_matches_sx
                                                            .send((
                                                                path.clone(),
                                                                line_num as usize - 1,
                                                                String::from(
                                                                    matched
                                                                        .strip_suffix("\r\n")
                                                                        .or_else(|| {
                                                                            matched
                                                                                .strip_suffix('\n')
                                                                        })
                                                                        .unwrap_or(matched),
                                                                ),
                                                            ))
                                                            .unwrap();
                                                    }
                                                }
                                            }
                                            // Exclude from file search
                                            checked.insert(path.clone());
                                        }
                                        Ok(true)
                                    }),
                                )
                                .ok();
                        }
                        // let mut checked = HashSet::<PathBuf>::new();
                        // let file_extension = editor.documents
                        //     [&editor.tree.get(editor.tree.focus).doc]
                        //     .path()
                        //     .and_then(|f| f.extension());
                        // for doc in editor.documents() {
                        //     searcher
                        //         .clone()
                        //         .search_slice(
                        //             matcher.clone(),
                        //             doc.text().to_string().as_bytes(),
                        //             sinks::UTF8(|line_num, matched| {
                        //                 if let Some(path) = doc.path() {
                        //                     if let Some(extension) = path.extension() {
                        //                         if let Some(file_extension) = file_extension {
                        //                             if file_extension == extension {
                        //                                 all_matches_sx
                        //                                     .send((
                        //                                         path.clone(),
                        //                                         line_num as usize - 1,
                        //                                         String::from(
                        //                                             matched
                        //                                                 .strip_suffix("\r\n")
                        //                                                 .or_else(|| {
                        //                                                     matched
                        //                                                         .strip_suffix('\n')
                        //                                                 })
                        //                                                 .unwrap_or(matched),
                        //                                         ),
                        //                                     ))
                        //                                     .unwrap();
                        //                             }
                        //                         }
                        //                     }
                        //                     // Exclude from file search
                        //                     checked.insert(path.clone());
                        //                 }
                        //                 Ok(true)
                        //             }),
                        //         )
                        //         .ok();
                        // }

                        let search_root = std::env::current_dir()
                            .expect("Global search error: Failed to get current dir");
                        let mut wb = WalkBuilder::new(search_root);
                        wb.hidden(file_picker_config.hidden)
                            .parents(file_picker_config.parents)
                            .ignore(file_picker_config.ignore)
                            .git_ignore(file_picker_config.git_ignore)
                            .git_global(file_picker_config.git_global)
                            .git_exclude(file_picker_config.git_exclude)
                            .max_depth(file_picker_config.max_depth);
                        if let Some(file_glob) = &file_glob {
                            wb.types(file_glob.clone());
                        }
                        wb.build_parallel().run(|| {
                            let mut searcher = searcher.clone();
                            let matcher = matcher.clone();
                            let all_matches_sx = all_matches_sx.clone();
                            let checked = checked.clone();
                            Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
                                let entry = match entry {
                                    Ok(entry) => entry,
                                    Err(_) => return WalkState::Continue,
                                };
                                // let search_root = std::env::current_dir()
                                //     .expect("Global search error: Failed to get current dir");
                                // let mut wb = WalkBuilder::new(search_root);
                                // wb.hidden(file_picker_config.hidden)
                                //     .parents(file_picker_config.parents)
                                //     .ignore(file_picker_config.ignore)
                                //     .git_ignore(file_picker_config.git_ignore)
                                //     .git_global(file_picker_config.git_global)
                                //     .git_exclude(file_picker_config.git_exclude)
                                //     .max_depth(file_picker_config.max_depth);
                                // if let Some(file_glob) = &file_glob {
                                //     wb.types(file_glob.clone());
                                // }
                                // wb.build_parallel().run(|| {
                                //     let mut searcher = searcher.clone();
                                //     let matcher = matcher.clone();
                                //     let all_matches_sx = all_matches_sx.clone();
                                //     let checked = checked.clone();
                                //     Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
                                //         let entry = match entry {
                                //             Ok(entry) => entry,
                                //             Err(_) => return WalkState::Continue,
                                //         };

                                match entry.file_type() {
                                    Some(entry) if entry.is_file() => {}
                                    // skip everything else
                                    _ => return WalkState::Continue,
                                };
                                // match entry.file_type() {
                                //     Some(entry) if entry.is_file() => {}
                                //     // skip everything else
                                //     _ => return WalkState::Continue,
                                // };

                                let result = searcher.search_path(
                                    &matcher,
                                    entry.path(),
                                    sinks::UTF8(|line_num, matched| {
                                        let path = entry.clone().into_path();
                                        if !checked.contains(&path) {
                                            all_matches_sx
                                                .send((
                                                    path,
                                                    line_num as usize - 1,
                                                    String::from(
                                                        matched
                                                            .strip_suffix("\r\n")
                                                            .or_else(|| matched.strip_suffix('\n'))
                                                            .unwrap_or(matched),
                                                    ),
                                                ))
                                                .unwrap();
                                        }
                                        Ok(true)
                                    }),
                                );
                                // let result = searcher.search_path(
                                //     &matcher,
                                //     entry.path(),
                                //     sinks::UTF8(|line_num, matched| {
                                //         let path = entry.clone().into_path();
                                //         if !checked.contains(&path) {
                                //             all_matches_sx
                                //                 .send((
                                //                     path,
                                //                     line_num as usize - 1,
                                //                     String::from(
                                //                         matched
                                //                             .strip_suffix("\r\n")
                                //                             .or_else(|| matched.strip_suffix('\n'))
                                //                             .unwrap_or(matched),
                                //                     ),
                                //                 ))
                                //                 .unwrap();
                                //         }
                                //         Ok(true)
                                //     }),
                                // );

                                if let Err(err) = result {
                                    log::error!(
                                        "Global search error: {}, {}",
                                        entry.path().display(),
                                        err
                                    );
                                }
                                WalkState::Continue
                            })
                        });
                    }
                },
            );
            // if let Err(err) = result {
            //     log::error!(
            //         "Global search error: {}, {}",
            //         entry.path().display(),
            //         err
            //     );
            // }
            //     WalkState::Continue
            // })
            // });
            // }
            // },
            // );

            let show_refactor = async move {
                let all_matches: Vec<(PathBuf, usize, String)> =
                    UnboundedReceiverStream::new(all_matches_rx).collect().await;
                let call: job::Callback = Callback::Editor(Box::new(move |editor: &mut Editor| {
                    if all_matches.is_empty() {
                        editor.set_status("No matches found");
                        return;
                    }
                    let mut matches: HashMap<PathBuf, Vec<(usize, String)>> = HashMap::new();
                    for (path, line, text) in all_matches {
                        if let Some(vec) = matches.get_mut(&path) {
                            vec.push((line, text));
                        } else {
                            let v = Vec::from([(line, text)]);
                            matches.insert(path, v);
                        }
                    }
                    // let show_refactor = async move {
                    //     let all_matches: Vec<(PathBuf, usize, String)> =
                    //         UnboundedReceiverStream::new(all_matches_rx).collect().await;
                    //     let call: job::Callback = Callback::Editor(Box::new(move |editor: &mut Editor| {
                    //         if all_matches.is_empty() {
                    //             editor.set_status("No matches found");
                    //             return;
                    //         }
                    //         let mut matches: HashMap<PathBuf, Vec<(usize, String)>> = HashMap::new();
                    //         for (path, line, text) in all_matches {
                    //             if let Some(vec) = matches.get_mut(&path) {
                    //                 vec.push((line, text));
                    //             } else {
                    //                 let v = Vec::from([(line, text)]);
                    //                 matches.insert(path, v);
                    //             }
                    //         }

                    let language_id = doc!(editor).language_id().map(String::from);
                    // let language_id = doc!(editor).language_id().map(String::from);

                    let mut doc_text = Rope::new();
                    let mut line_map = HashMap::new();
                    // let mut doc_text = Rope::new();
                    // let mut line_map = HashMap::new();

                    let mut count = 0;
                    for (key, value) in &matches {
                        for (line, text) in value {
                            doc_text.insert(doc_text.len_chars(), text);
                            doc_text.insert(doc_text.len_chars(), "\n");
                            line_map.insert((key.clone(), *line), count);
                            count += 1;
                        }
                    }
                    doc_text.split_off(doc_text.len_chars().saturating_sub(1));
                    let mut doc = Document::refactor(
                        doc_text,
                        matches,
                        line_map,
                        // `Document::refactor` now takes an optional `(encoding, has_bom)` pair;
                        // assume no BOM for the generated refactor buffer.
                        encoding.map(|enc| (enc, false)),
                        editor.config.clone(),
                        editor.syn_loader.clone(),
                    );
                    if let Some(language_id) = language_id {
                        doc.set_language_by_language_id(&language_id, editor.syn_loader.clone())
                            .ok();
                    };
                    editor.new_file_from_document(Action::Replace, doc);
                }));
                Ok(call)
            };
            cx.jobs.callback(show_refactor);
        }
        helix_view::document::DocumentType::Refactor { matches, line_map } => {
            let refactor_id = doc!(cx.editor).id();
            let replace_text = doc!(cx.editor).text().clone();
            let view = view!(cx.editor).clone();
            let mut documents: usize = 0;
            let mut count: usize = 0;
            for (key, value) in matches {
                let mut changes = Vec::<(usize, usize, String)>::new();
                for (line, text) in value {
                    if let Some(re_line) = line_map.get(&(key.clone(), *line)) {
                        let mut replace = replace_text
                            .get_line(*re_line)
                            .unwrap_or_else(|| "\n".into())
                            .to_string()
                            .clone();
                        replace = replace.strip_suffix('\n').unwrap_or(&replace).to_string();
                        if text != &replace {
                            changes.push((*line, text.chars().count(), replace));
                        }
                    }
                }
                if !changes.is_empty() {
                    if let Some(doc) = cx
                        .editor
                        .open(key, Action::Load)
                        .ok()
                        .and_then(|id| cx.editor.document_mut(id))
                    {
                        documents += 1;
                        let mut applychanges = Vec::<(usize, usize, Option<Tendril>)>::new();
                        for (line, length, text) in changes {
                            if doc.text().len_lines() > line {
                                let start = doc.text().line_to_char(line);
                                applychanges.push((
                                    start,
                                    start + length,
                                    Some(Tendril::from(text.to_string())),
                                ));
                                count += 1;
                            }
                        }
                        let transaction = Transaction::change(doc.text(), applychanges.into_iter());
                        doc.apply(&transaction, view.id);
                    }
                }
            }
            cx.editor.set_status(format!(
                "Refactored {} documents, {} lines changed.",
                documents, count
            ));
            cx.editor.close_document(refactor_id, true).ok();
        }
    }
}
// let mut count = 0;
// for (key, value) in &matches {
//     for (line, text) in value {
//         doc_text.insert(doc_text.len_chars(), text);
//         doc_text.insert(doc_text.len_chars(), "\n");
//         line_map.insert((key.clone(), *line), count);
//         count += 1;
//     }
// }
// doc_text.split_off(doc_text.len_chars().saturating_sub(1));
// let mut doc = Document::refactor(
//     doc_text,
//     matches,
//     line_map,
//     // TODO: actually learn how to detect encoding
//     None,
//     editor.config.clone(),
//     editor.syn_loader.clone(),
// );
// // if let Some(language_id) = language_id {
// //     doc.set_language_by_language_id(&language_id, editor.syn_loader.clone())
// //         .ok();
// // };
// editor.new_file_from_document(Action::Replace, doc);
// }));
// Ok(call)
// };
// cx.jobs.callback(show_refactor);
// }
// helix_view::document::DocumentType::Refactor { matches, line_map } => {
//     let refactor_id = doc!(cx.editor).id();
//     let replace_text = doc!(cx.editor).text().clone();
//     let view = view!(cx.editor).clone();
//     let mut documents: usize = 0;
//     let mut count: usize = 0;
//     for (key, value) in matches {
//         let mut changes = Vec::<(usize, usize, String)>::new();
//         for (line, text) in value {
//             if let Some(re_line) = line_map.get(&(key.clone(), *line)) {
//                 let mut replace = replace_text
//                     .get_line(*re_line)
//                     .unwrap_or_else(|| "\n".into())
//                     .to_string()
//                     .clone();
//                 replace = replace.strip_suffix('\n').unwrap_or(&replace).to_string();
//                 if text != &replace {
//                     changes.push((*line, text.chars().count(), replace));
//                 }
//             }
//         }
//         if !changes.is_empty() {
//             if let Some(doc) = cx
//                 .editor
//                 .open(key, Action::Load)
//                 .ok()
//                 .and_then(|id| cx.editor.document_mut(id))
//             {
//                 documents += 1;
//                 let mut applychanges = Vec::<(usize, usize, Option<Tendril>)>::new();
//                 for (line, length, text) in changes {
//                     if doc.text().len_lines() > line {
//                         let start = doc.text().line_to_char(line);
//                         applychanges.push((
//                             start,
//                             start + length,
//                             Some(Tendril::from(text.to_string())),
//                         ));
//                         count += 1;
//                     }
//                 }
//                 let transaction = Transaction::change(doc.text(), applychanges.into_iter());
//                 doc.apply(&transaction, view.id);
//             }
//         }
//     }
//     cx.editor.set_status(format!(
//         "Refactored {} documents, {} lines changed.",
//         documents, count
//     ));
//     cx.editor.close_document(refactor_id, true).ok();
//     }
//     }
// }

enum Extend {
    Above,
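
Aside: the DocumentType::File arm above flattens every collected match into one scratch buffer and records, for each (path, line) pair, which buffer line holds it. The standalone sketch below models that bookkeeping with plain std types only; flatten_matches is an illustrative name, not a helix API.

// Standalone sketch (not part of this commit): per-file matches flattened into a
// single buffer plus a reverse line map, mirroring `doc_text` / `line_map` above.
use std::collections::HashMap;
use std::path::PathBuf;

fn flatten_matches(
    matches: &HashMap<PathBuf, Vec<(usize, String)>>,
) -> (String, HashMap<(PathBuf, usize), usize>) {
    let mut buffer = String::new();
    let mut line_map = HashMap::new();
    let mut buffer_line = 0;
    for (path, lines) in matches {
        for (line, text) in lines {
            buffer.push_str(text);
            buffer.push('\n');
            // Remember which buffer line corresponds to (file, original line).
            line_map.insert((path.clone(), *line), buffer_line);
            buffer_line += 1;
        }
    }
    // Drop the trailing newline, like the `split_off` call in the real code.
    buffer.pop();
    (buffer, line_map)
}

fn main() {
    let mut matches = HashMap::new();
    matches.insert(
        PathBuf::from("src/lib.rs"),
        vec![(3, "fn old_name() {}".to_string()), (10, "old_name();".to_string())],
    );
    let (buffer, line_map) = flatten_matches(&matches);
    assert_eq!(buffer.lines().count(), 2);
    assert_eq!(line_map[&(PathBuf::from("src/lib.rs"), 3)], 0);
    println!("{buffer}");
}
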
@@ -3414,50 +3430,16 @@ fn buffer_picker(cx: &mut Context) {
        path: Option<PathBuf>,
        is_modified: bool,
        is_current: bool,
        focused_at: std::time::Instant,
        is_refactor: bool,
    }

    let new_meta = |doc: &Document| BufferMeta {
        id: doc.id(),
        path: doc.path().cloned(),
        is_modified: doc.is_modified(),
        is_current: doc.id() == current,
        focused_at: doc.focused_at,
        is_refactor: matches!(
            &doc.document_type,
            helix_view::document::DocumentType::Refactor {
@@ -3465,7 +3447,6 @@ fn buffer_picker(cx: &mut Context) {
                line_map: _
            }
        ),
    };

    let mut items = cx
@@ -3491,6 +3472,10 @@ fn buffer_picker(cx: &mut Context) {
            flags.into()
        }),
        PickerColumn::new("path", |meta: &BufferMeta, _| {
            // TODO: make this rust look like actual rust
            if meta.is_refactor {
                return helix_view::document::REFACTOR_BUFFER_NAME.into();
            }
            let path = meta
                .path
                .as_deref()
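
Aside: the DocumentType::Refactor arm of global_refactor above walks the edited refactor buffer and, wherever a line differs from the original match text, replaces that line in the source document through a Transaction over char ranges. The sketch below models the same line-replacement idea on a plain Vec<String> "document"; apply_line_edits is an illustrative name, not the helix implementation.

// Standalone sketch (not part of this commit): replace whole lines that changed,
// guarded by a line-count check like `doc.text().len_lines() > line` above.
fn apply_line_edits(doc: &mut Vec<String>, edits: &[(usize, String)]) -> usize {
    let mut applied = 0;
    for (line, replacement) in edits {
        if let Some(slot) = doc.get_mut(*line) {
            if *slot != *replacement {
                *slot = replacement.clone();
                applied += 1;
            }
        }
    }
    applied
}

fn main() {
    let mut doc = vec![
        "fn old_name() {}".to_string(),
        "fn keep() {}".to_string(),
    ];
    let edits = vec![(0, "fn new_name() {}".to_string())];
    let changed = apply_line_edits(&mut doc, &edits);
    assert_eq!(changed, 1);
    assert_eq!(doc[0], "fn new_name() {}");
}
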
@@ -13,6 +13,7 @@ use crate::{
        EditorView,
    },
};
use crossterm::event::KeyEvent;
use futures_util::future::BoxFuture;
use helix_event::AsyncHook;
use nucleo::pattern::{CaseMatching, Normalization};
@@ -29,10 +30,10 @@ use tui::{
use tui::widgets::Widget;

use std::{
    borrow::Cow,
    borrow::{BorrowMut, Cow},
    collections::HashMap,
    io::Read,
    path::Path,
    path::{Path, PathBuf},
    sync::{
        atomic::{self, AtomicUsize},
        Arc,
@@ -266,6 +267,7 @@ pub struct Picker<T: 'static + Send + Sync, D: 'static> {
    /// Given an item in the picker, return the file path and line number to display.
    file_fn: Option<FileCallback<T>>,
    /// Optional callback invoked with every matched item when the quickfix binding is pressed.
    quickfix_fn: QuickfixCallback<T>,
    /// An event handler for syntax highlighting the currently previewed file.
    preview_highlight_handler: Sender<Arc<Path>>,
    dynamic_query_handler: Option<Sender<DynamicQueryChange>>,
}
@@ -335,6 +337,7 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
        primary_column: usize,
        injector: Injector<T, D>,
        callback_fn: impl Fn(&mut Context, &T, Action) + 'static,
        quickfix_fn: Option<Box<dyn Fn(&mut Context, Vec<&T>) + 'static>>,
    ) -> Self {
        Self::with(
            matcher,
@@ -382,6 +385,7 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
            truncate_start: true,
            show_preview: true,
            callback_fn: Box::new(callback_fn),
            quickfix_fn: None,
            completion_height: 0,
            widths,
            preview_cache: HashMap::new(),
@@ -419,6 +423,11 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
        self
    }

    pub fn with_quickfix(mut self, quickfix_fn: impl Fn(&mut Context, Vec<&T>) + 'static) -> Self {
        self.quickfix_fn = Some(Box::new(quickfix_fn));
        self
    }

    pub fn with_history_register(mut self, history_register: Option<char>) -> Self {
        self.prompt.with_history_register(history_register);
        self
@@ -490,6 +499,15 @@ impl<T: 'static + Send + Sync, D: 'static + Send + Sync> Picker<T, D> {
            .map(|item| item.data)
    }

    pub fn get_list(&self) -> Vec<&T> {
        let matcher = self.matcher.snapshot();
        let total = matcher.matched_item_count();
        matcher
            .matched_items(0..total)
            .map(|item| item.data)
            .collect()
    }

    fn primary_query(&self) -> Arc<str> {
        self.query
            .get(&self.columns[self.primary_column].name)
@@ -1124,6 +1142,15 @@ impl<I: 'static + Send + Sync, D: 'static + Send + Sync> Component for Picker<I, D> {
                ctrl!('t') => {
                    self.toggle_preview();
                }
                ctrl!('q') => {
                    if self.selection().is_some() {
                        if let Some(quickfix) = &self.quickfix_fn {
                            let items = self.get_list();
                            (quickfix)(ctx, items);
                        }
                    }
                    return close_fn(self);
                }
                _ => {
                    self.prompt_handle_event(event, ctx);
                }
@@ -1168,3 +1195,4 @@ impl<T: 'static + Send + Sync, D> Drop for Picker<T, D> {
}

type PickerCallback<T> = Box<dyn Fn(&mut Context, &T, Action)>;
type QuickfixCallback<T> = Option<Box<dyn Fn(&mut Context, Vec<&T>)>>;
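
Aside: the quickfix plumbing added to the picker is a stored optional callback (with_quickfix), a get_list helper that snapshots every matched item from the nucleo matcher, and a ctrl-q branch that hands that list to the callback before closing. The standalone model below mirrors that shape; MiniPicker is illustrative only, and the real callback additionally receives &mut Context and a Vec<&T>.

// Standalone sketch (not part of this commit): a builder-stored callback that is
// later invoked with all items, like `with_quickfix` + the `ctrl!('q')` branch.
struct MiniPicker<T> {
    items: Vec<T>,
    quickfix_fn: Option<Box<dyn Fn(&[T])>>,
}

impl<T> MiniPicker<T> {
    fn new(items: Vec<T>) -> Self {
        Self { items, quickfix_fn: None }
    }

    // Mirrors `Picker::with_quickfix`: store the callback for later use.
    fn with_quickfix(mut self, f: impl Fn(&[T]) + 'static) -> Self {
        self.quickfix_fn = Some(Box::new(f));
        self
    }

    // Mirrors the key-handler branch: hand every item to the callback, if any.
    fn trigger_quickfix(&self) {
        if let Some(quickfix) = &self.quickfix_fn {
            quickfix(&self.items);
        }
    }
}

fn main() {
    let picker = MiniPicker::new(vec!["a.rs:3", "b.rs:10"])
        .with_quickfix(|items| println!("Quickfix entries: {}", items.join(" ")));
    picker.trigger_quickfix();
}
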
@@ -140,7 +140,7 @@ pub enum DocumentOpenError {
    IoError(#[from] io::Error),
}

#[derive(Clone)]
#[derive(Debug, Clone)]
pub enum DocumentType {
    File,
    Refactor {
@@ -221,6 +221,7 @@ pub struct Document {
    // large refactor that would make `&mut Editor` available on the `DocumentDidChange` event.
    pub color_swatch_controller: TaskController,

    pub document_type: DocumentType,
    // NOTE: this field should eventually go away - we should use the Editor's syn_loader instead
    // of storing a copy on every doc. Then we can remove the surrounding `Arc` and use the
    // `ArcSwap` directly.
@@ -232,7 +233,6 @@ pub struct DocumentColorSwatches {
    pub color_swatches: Vec<InlineAnnotation>,
    pub colors: Vec<syntax::Highlight>,
    pub color_swatches_padding: Vec<InlineAnnotation>,
    pub document_type: DocumentType,
}

/// Inlay hints for a single `(Document, View)` combo.
@@ -748,23 +748,30 @@ impl Document {
        text: Rope,
        matches: HashMap<PathBuf, Vec<(usize, String)>>,
        line_map: HashMap<(PathBuf, usize), usize>,
        encoding: Option<&'static encoding::Encoding>,
        encoding_with_bom_info: Option<(&'static Encoding, bool)>,
        config: Arc<dyn DynAccess<Config>>,
        syn_loader: Arc<ArcSwap<syntax::Loader>>,
    ) -> Self {
        let encoding = encoding.unwrap_or(encoding::UTF_8);
        let changes = ChangeSet::new(&text);
        let (encoding, has_bom) = encoding_with_bom_info.unwrap_or((encoding::UTF_8, false));
        let line_ending = config.load().default_line_ending.into();
        let changes = ChangeSet::new(text.slice(..));
        let old_state = None;

        Self {
            id: DocumentId::default(),
            active_snippet: None,
            path: None,
            relative_path: OnceCell::new(),
            encoding,
            has_bom,
            text,
            selections: HashMap::default(),
            inlay_hints: HashMap::default(),
            inlay_hints_oudated: false,
            view_data: Default::default(),
            indent_style: DEFAULT_INDENT,
            line_ending: DEFAULT_LINE_ENDING,
            editor_config: EditorConfig::default(),
            line_ending,
            restore_cursor: false,
            syntax: None,
            language: None,
@@ -777,10 +784,16 @@ impl Document {
            last_saved_time: SystemTime::now(),
            last_saved_revision: 0,
            modified_since_accessed: false,
            language_server: None,
            language_servers: HashMap::new(),
            diff_handle: None,
            config,
            version_control_head: None,
            focused_at: std::time::Instant::now(),
            readonly: false,
            jump_labels: HashMap::new(),
            color_swatches: None,
            color_swatch_controller: TaskController::new(),
            syn_loader,
            document_type: DocumentType::Refactor { matches, line_map },
        }
    }
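
Aside: DocumentType now distinguishes a plain File buffer from a Refactor buffer carrying the collected matches and the line map, and callers branch on it (as buffer_picker and global_refactor do above). A minimal stand-in of that shape, not the helix_view types:

// Standalone sketch (not part of this commit): branching on a refactor-vs-file
// document kind. `Doc` and `describe` are illustrative stand-ins.
use std::collections::HashMap;
use std::path::PathBuf;

#[derive(Debug, Clone)]
enum DocumentType {
    File,
    Refactor {
        matches: HashMap<PathBuf, Vec<(usize, String)>>,
        line_map: HashMap<(PathBuf, usize), usize>,
    },
}

struct Doc {
    document_type: DocumentType,
}

fn describe(doc: &Doc) -> String {
    match &doc.document_type {
        DocumentType::File => "regular file buffer".to_string(),
        DocumentType::Refactor { matches, .. } => {
            format!("refactor buffer covering {} file(s)", matches.len())
        }
    }
}

fn main() {
    let mut matches = HashMap::new();
    matches.insert(PathBuf::from("src/lib.rs"), vec![(3, "old".to_string())]);
    let refactor = Doc {
        document_type: DocumentType::Refactor {
            matches,
            line_map: HashMap::from([((PathBuf::from("src/lib.rs"), 3), 0)]),
        },
    };
    println!("{}", describe(&refactor));
    println!("{}", describe(&Doc { document_type: DocumentType::File }));
}
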