mirror of https://github.com/helix-editor/helix
Impl refactoring view
parent f75d71844f
commit 810dc46010
@@ -42,7 +42,7 @@ use helix_core::{
     Selection, SmallVec, Syntax, Tendril, Transaction,
 };
 use helix_view::{
-    document::{FormatterError, Mode, SCRATCH_BUFFER_NAME},
+    document::{FormatterError, Mode, REFACTOR_BUFFER_NAME, SCRATCH_BUFFER_NAME},
     editor::Action,
     info::Info,
     input::KeyEvent,
@@ -378,6 +378,7 @@ impl MappableCommand {
         search_selection_detect_word_boundaries, "Use current selection as the search pattern, automatically wrapping with `\\b` on word boundaries",
         make_search_word_bounded, "Modify current search to make it word bounded",
         global_search, "Global search in workspace folder",
+        global_refactor, "Global refactoring in workspace folder",
         extend_line, "Select current line, if already selected, extend to another line based on the anchor",
         extend_line_below, "Select current line, if already selected, extend to next line",
         extend_line_above, "Select current line, if already selected, extend to previous line",
@@ -2668,6 +2669,275 @@ fn global_search(cx: &mut Context) {
     cx.push_layer(Box::new(overlaid(picker)));
 }
 
+fn global_refactor(cx: &mut Context) {
+    let document_type = doc!(cx.editor).document_type.clone();
+
+    match &document_type {
+        helix_view::document::DocumentType::File => {
+            let (all_matches_sx, all_matches_rx) =
+                tokio::sync::mpsc::unbounded_channel::<(PathBuf, usize, String)>();
+            let config = cx.editor.config();
+            let smart_case = config.search.smart_case;
+            let file_picker_config = config.file_picker.clone();
+
+            let reg = cx.register.unwrap_or('/');
+
+            // Restrict to current file type if possible
+            let file_extension = doc!(cx.editor).path().and_then(|f| f.extension());
+            let file_glob = if let Some(file_glob) = file_extension.and_then(|f| f.to_str()) {
+                let mut tb = ignore::types::TypesBuilder::new();
+                tb.add("p", &(String::from("*.") + file_glob))
+                    .ok()
+                    .and_then(|_| {
+                        tb.select("all");
+                        tb.build().ok()
+                    })
+            } else {
+                None
+            };
+
+            let encoding = Some(doc!(cx.editor).encoding());
+
+            let completions = search_completions(cx, Some(reg));
+            ui::regex_prompt(
+                cx,
+                "global-refactor:".into(),
+                Some(reg),
+                move |_editor: &Editor, input: &str| {
+                    completions
+                        .iter()
+                        .filter(|comp| comp.starts_with(input))
+                        .map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
+                        .collect()
+                },
+                move |editor, regex, event| {
+                    if event != PromptEvent::Validate {
+                        return;
+                    }
+
+                    if let Ok(matcher) = RegexMatcherBuilder::new()
+                        .case_smart(smart_case)
+                        .build(regex.as_str())
+                    {
+                        let searcher = SearcherBuilder::new()
+                            .binary_detection(BinaryDetection::quit(b'\x00'))
+                            .build();
+
+                        let mut checked = HashSet::<PathBuf>::new();
+                        let file_extension = editor.documents
+                            [&editor.tree.get(editor.tree.focus).doc]
+                            .path()
+                            .and_then(|f| f.extension());
+                        for doc in editor.documents() {
+                            searcher
+                                .clone()
+                                .search_slice(
+                                    matcher.clone(),
+                                    doc.text().to_string().as_bytes(),
+                                    sinks::UTF8(|line_num, matched| {
+                                        if let Some(path) = doc.path() {
+                                            if let Some(extension) = path.extension() {
+                                                if let Some(file_extension) = file_extension {
+                                                    if file_extension == extension {
+                                                        all_matches_sx
+                                                            .send((
+                                                                path.clone(),
+                                                                line_num as usize - 1,
+                                                                String::from(
+                                                                    matched
+                                                                        .strip_suffix("\r\n")
+                                                                        .or_else(|| {
+                                                                            matched
+                                                                                .strip_suffix('\n')
+                                                                        })
+                                                                        .unwrap_or(matched),
+                                                                ),
+                                                            ))
+                                                            .unwrap();
+                                                    }
+                                                }
+                                            }
+                                            // Exclude from file search
+                                            checked.insert(path.clone());
+                                        }
+                                        Ok(true)
+                                    }),
+                                )
+                                .ok();
+                        }
+
+                        let search_root = std::env::current_dir()
+                            .expect("Global search error: Failed to get current dir");
+                        let mut wb = WalkBuilder::new(search_root);
+                        wb.hidden(file_picker_config.hidden)
+                            .parents(file_picker_config.parents)
+                            .ignore(file_picker_config.ignore)
+                            .git_ignore(file_picker_config.git_ignore)
+                            .git_global(file_picker_config.git_global)
+                            .git_exclude(file_picker_config.git_exclude)
+                            .max_depth(file_picker_config.max_depth);
+                        if let Some(file_glob) = &file_glob {
+                            wb.types(file_glob.clone());
+                        }
+                        wb.build_parallel().run(|| {
+                            let mut searcher = searcher.clone();
+                            let matcher = matcher.clone();
+                            let all_matches_sx = all_matches_sx.clone();
+                            let checked = checked.clone();
+                            Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
+                                let entry = match entry {
+                                    Ok(entry) => entry,
+                                    Err(_) => return WalkState::Continue,
+                                };
+
+                                match entry.file_type() {
+                                    Some(entry) if entry.is_file() => {}
+                                    // skip everything else
+                                    _ => return WalkState::Continue,
+                                };
+
+                                let result = searcher.search_path(
+                                    &matcher,
+                                    entry.path(),
+                                    sinks::UTF8(|line_num, matched| {
+                                        let path = entry.clone().into_path();
+                                        if !checked.contains(&path) {
+                                            all_matches_sx
+                                                .send((
+                                                    path,
+                                                    line_num as usize - 1,
+                                                    String::from(
+                                                        matched
+                                                            .strip_suffix("\r\n")
+                                                            .or_else(|| matched.strip_suffix('\n'))
+                                                            .unwrap_or(matched),
+                                                    ),
+                                                ))
+                                                .unwrap();
+                                        }
+                                        Ok(true)
+                                    }),
+                                );
+
+                                if let Err(err) = result {
+                                    log::error!(
+                                        "Global search error: {}, {}",
+                                        entry.path().display(),
+                                        err
+                                    );
+                                }
+                                WalkState::Continue
+                            })
+                        });
+                    }
+                },
+            );
+
+            let show_refactor = async move {
+                let all_matches: Vec<(PathBuf, usize, String)> =
+                    UnboundedReceiverStream::new(all_matches_rx).collect().await;
+                let call: job::Callback = Callback::Editor(Box::new(move |editor: &mut Editor| {
+                    if all_matches.is_empty() {
+                        editor.set_status("No matches found");
+                        return;
+                    }
+                    let mut matches: HashMap<PathBuf, Vec<(usize, String)>> = HashMap::new();
+                    for (path, line, text) in all_matches {
+                        if let Some(vec) = matches.get_mut(&path) {
+                            vec.push((line, text));
+                        } else {
+                            let v = Vec::from([(line, text)]);
+                            matches.insert(path, v);
+                        }
+                    }
+
+                    let language_id = doc!(editor).language_id().map(String::from);
+
+                    let mut doc_text = Rope::new();
+                    let mut line_map = HashMap::new();
+
+                    let mut count = 0;
+                    for (key, value) in &matches {
+                        for (line, text) in value {
+                            doc_text.insert(doc_text.len_chars(), text);
+                            doc_text.insert(doc_text.len_chars(), "\n");
+                            line_map.insert((key.clone(), *line), count);
+                            count += 1;
+                        }
+                    }
+                    doc_text.split_off(doc_text.len_chars().saturating_sub(1));
+                    let mut doc = Document::refactor(
+                        doc_text,
+                        matches,
+                        line_map,
+                        encoding,
+                        editor.config.clone(),
+                    );
+                    if let Some(language_id) = language_id {
+                        doc.set_language_by_language_id(&language_id, editor.syn_loader.clone())
+                            .ok();
+                    };
+                    editor.new_file_from_document(Action::Replace, doc);
+                }));
+                Ok(call)
+            };
+            cx.jobs.callback(show_refactor);
+        }
+        helix_view::document::DocumentType::Refactor { matches, line_map } => {
+            let refactor_id = doc!(cx.editor).id();
+            let replace_text = doc!(cx.editor).text().clone();
+            let view = view!(cx.editor).clone();
+            let mut documents: usize = 0;
+            let mut count: usize = 0;
+            for (key, value) in matches {
+                let mut changes = Vec::<(usize, usize, String)>::new();
+                for (line, text) in value {
+                    if let Some(re_line) = line_map.get(&(key.clone(), *line)) {
+                        let mut replace = replace_text
+                            .get_line(*re_line)
+                            .unwrap_or_else(|| "\n".into())
+                            .to_string()
+                            .clone();
+                        replace = replace.strip_suffix('\n').unwrap_or(&replace).to_string();
+                        if text != &replace {
+                            changes.push((*line, text.chars().count(), replace));
+                        }
+                    }
+                }
+                if !changes.is_empty() {
+                    if let Some(doc) = cx
+                        .editor
+                        .open(key, Action::Load)
+                        .ok()
+                        .and_then(|id| cx.editor.document_mut(id))
+                    {
+                        documents += 1;
+                        let mut applychanges = Vec::<(usize, usize, Option<Tendril>)>::new();
+                        for (line, length, text) in changes {
+                            if doc.text().len_lines() > line {
+                                let start = doc.text().line_to_char(line);
+                                applychanges.push((
+                                    start,
+                                    start + length,
+                                    Some(Tendril::from(text.to_string())),
+                                ));
+                                count += 1;
+                            }
+                        }
+                        let transaction = Transaction::change(doc.text(), applychanges.into_iter());
+                        doc.apply(&transaction, view.id);
+                    }
+                }
+            }
+            cx.editor.set_status(format!(
+                "Refactored {} documents, {} lines changed.",
+                documents, count
+            ));
+            cx.editor.close_document(refactor_id, true).ok();
+        }
+    }
+}
+
 enum Extend {
     Above,
     Below,
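Both arms of global_refactor hinge on two maps: matches groups each hit as (line, text) per file, and line_map ties every line of the generated refactor buffer back to its (path, line) origin. As a rough orientation only, here is a standalone sketch of that round trip using plain std types. build_refactor_buffer and collect_changes are hypothetical helper names, not functions from this commit, and the real code works on Ropes and applies Transactions rather than returning strings.

use std::collections::HashMap;
use std::path::PathBuf;

type Matches = HashMap<PathBuf, Vec<(usize, String)>>;
type LineMap = HashMap<(PathBuf, usize), usize>;

// Flatten grouped matches into one buffer (one match per line) and remember
// which buffer line belongs to which (file, source line) pair.
fn build_refactor_buffer(matches: &Matches) -> (String, LineMap) {
    let mut text = String::new();
    let mut line_map = LineMap::new();
    let mut buffer_line = 0;
    for (path, hits) in matches {
        for (line, matched) in hits {
            text.push_str(matched);
            text.push('\n');
            line_map.insert((path.clone(), *line), buffer_line);
            buffer_line += 1;
        }
    }
    text.pop(); // drop the trailing newline, like the Rope::split_off call above
    (text, line_map)
}

// After the buffer is edited, collect (source line, old length, new text)
// per file, skipping lines that were left untouched.
fn collect_changes(
    matches: &Matches,
    line_map: &LineMap,
    edited: &str,
) -> HashMap<PathBuf, Vec<(usize, usize, String)>> {
    let edited_lines: Vec<&str> = edited.lines().collect();
    let mut changes: HashMap<PathBuf, Vec<(usize, usize, String)>> = HashMap::new();
    for (path, hits) in matches {
        for (line, original) in hits {
            if let Some(&buffer_line) = line_map.get(&(path.clone(), *line)) {
                let replacement = edited_lines.get(buffer_line).copied().unwrap_or("");
                if replacement != original {
                    changes.entry(path.clone()).or_default().push((
                        *line,
                        original.chars().count(),
                        replacement.to_string(),
                    ));
                }
            }
        }
    }
    changes
}

fn main() {
    let mut matches = Matches::new();
    matches.insert(
        PathBuf::from("src/lib.rs"),
        vec![(3, "fn old_name() {".to_string())],
    );

    let (buffer, line_map) = build_refactor_buffer(&matches);
    assert_eq!(buffer, "fn old_name() {");

    // Simulate the user renaming the function inside the refactor buffer.
    let changes = collect_changes(&matches, &line_map, "fn new_name() {");
    assert_eq!(
        changes[&PathBuf::from("src/lib.rs")],
        vec![(3, 15, "fn new_name() {".to_string())]
    );
}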
@@ -3144,7 +3414,40 @@ fn buffer_picker(cx: &mut Context) {
         path: Option<PathBuf>,
         is_modified: bool,
         is_current: bool,
+<<<<<<< HEAD
         focused_at: std::time::Instant,
+=======
+        is_refactor: bool,
+    }
+
+    impl ui::menu::Item for BufferMeta {
+        type Data = ();
+
+        fn format(&self, _data: &Self::Data) -> Row {
+            let path = self
+                .path
+                .as_deref()
+                .map(helix_core::path::get_relative_path);
+            let path = if self.is_refactor {
+                REFACTOR_BUFFER_NAME
+            } else {
+                match path.as_deref().and_then(Path::to_str) {
+                    Some(path) => path,
+                    None => SCRATCH_BUFFER_NAME,
+                }
+            };
+
+            let mut flags = String::new();
+            if self.is_modified {
+                flags.push('+');
+            }
+            if self.is_current {
+                flags.push('*');
+            }
+
+            Row::new([self.id.to_string(), flags, path.to_string()])
+        }
+>>>>>>> f55507e4 (Impl refactoring view)
     }
 
     let new_meta = |doc: &Document| BufferMeta {
@@ -3152,7 +3455,17 @@ fn buffer_picker(cx: &mut Context) {
         path: doc.path().cloned(),
         is_modified: doc.is_modified(),
         is_current: doc.id() == current,
+<<<<<<< HEAD
         focused_at: doc.focused_at,
+=======
+        is_refactor: matches!(
+            &doc.document_type,
+            helix_view::document::DocumentType::Refactor {
+                matches: _,
+                line_map: _
+            }
+        ),
+>>>>>>> f55507e4 (Impl refactoring view)
     };
 
     let mut items = cx
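The file-type restriction in global_refactor above relies on a small trick from the ignore crate: register a single custom type for the current extension, select it, and hand the resulting Types to the WalkBuilder. A minimal standalone sketch of that pattern, with a hard-coded "*.rs" glob and a ./src root purely for illustration:

use ignore::{types::TypesBuilder, WalkBuilder};

fn main() -> Result<(), ignore::Error> {
    // Register one custom type named "p" that matches *.rs, then select it so
    // the walk only yields matching files (the command above uses the current
    // document's extension instead of a hard-coded "rs").
    let mut tb = TypesBuilder::new();
    tb.add("p", "*.rs")?;
    tb.select("all");
    let types = tb.build()?;

    for entry in WalkBuilder::new("./src").types(types).build() {
        let entry = entry?;
        if entry.file_type().map_or(false, |ft| ft.is_file()) {
            println!("{}", entry.path().display());
        }
    }
    Ok(())
}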
@@ -583,13 +583,19 @@ impl EditorView {
         let current_doc = view!(editor).doc;
 
         for doc in editor.documents() {
-            let fname = doc
+            let fname = match &doc.document_type {
+                helix_view::document::DocumentType::File => doc
                 .path()
                 .unwrap_or(&scratch)
                 .file_name()
                 .unwrap_or_default()
                 .to_str()
-                .unwrap_or_default();
+                .unwrap_or_default(),
+                helix_view::document::DocumentType::Refactor {
+                    matches: _,
+                    line_map: _,
+                } => helix_view::document::REFACTOR_BUFFER_NAME,
+            };
 
             let style = if current_doc == doc.id() {
                 bufferline_active
@@ -5,7 +5,7 @@ use helix_core::{coords_at_pos, encoding, Position};
 use helix_lsp::lsp::DiagnosticSeverity;
 use helix_view::document::DEFAULT_LANGUAGE_NAME;
 use helix_view::{
-    document::{Mode, SCRATCH_BUFFER_NAME},
+    document::{Mode, REFACTOR_BUFFER_NAME, SCRATCH_BUFFER_NAME},
     graphics::Rect,
     theme::Style,
     Document, Editor, View,
@@ -450,12 +450,20 @@ where
     F: Fn(&mut RenderContext<'a>, Span<'a>) + Copy,
 {
     let title = {
+        match &context.doc.document_type {
+            helix_view::document::DocumentType::File => {
         let rel_path = context.doc.relative_path();
         let path = rel_path
             .as_ref()
             .map(|p| p.to_string_lossy())
             .unwrap_or_else(|| SCRATCH_BUFFER_NAME.into());
         format!(" {} ", path)
+            }
+            helix_view::document::DocumentType::Refactor {
+                matches: _,
+                line_map: _,
+            } => REFACTOR_BUFFER_NAME.into(),
+        }
     };
 
     write(context, title.into());
@@ -61,6 +61,8 @@ pub const DEFAULT_LANGUAGE_NAME: &str = "text";
 
 pub const SCRATCH_BUFFER_NAME: &str = "[scratch]";
 
+pub const REFACTOR_BUFFER_NAME: &str = "[refactor]";
+
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub enum Mode {
     Normal = 0,
@@ -138,6 +140,15 @@ pub enum DocumentOpenError {
     IoError(#[from] io::Error),
 }
 
+#[derive(Clone)]
+pub enum DocumentType {
+    File,
+    Refactor {
+        matches: HashMap<PathBuf, Vec<(usize, String)>>,
+        line_map: HashMap<(PathBuf, usize), usize>,
+    },
+}
+
 pub struct Document {
     pub(crate) id: DocumentId,
     text: Rope,
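The DocumentType::Refactor variant added here carries the same matches/line_map pair that global_refactor builds, and the call sites in this diff check for the variant by naming every field (matches: _, line_map: _). For reference, the .. rest pattern expresses the same check more compactly; the enum below is only a local mirror of the one above so the snippet compiles on its own.

use std::collections::HashMap;
use std::path::PathBuf;

// Local stand-in for the DocumentType added above.
#[allow(dead_code)]
enum DocumentType {
    File,
    Refactor {
        matches: HashMap<PathBuf, Vec<(usize, String)>>,
        line_map: HashMap<(PathBuf, usize), usize>,
    },
}

// `..` ignores all remaining fields, equivalent to `matches: _, line_map: _`.
fn is_refactor(ty: &DocumentType) -> bool {
    matches!(ty, DocumentType::Refactor { .. })
}

fn main() {
    assert!(!is_refactor(&DocumentType::File));
    let refactor = DocumentType::Refactor {
        matches: HashMap::new(),
        line_map: HashMap::new(),
    };
    assert!(is_refactor(&refactor));
}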
@@ -221,6 +232,7 @@ pub struct DocumentColorSwatches {
     pub color_swatches: Vec<InlineAnnotation>,
     pub colors: Vec<syntax::Highlight>,
     pub color_swatches_padding: Vec<InlineAnnotation>,
+    pub document_type: DocumentType,
 }
 
 /// Inlay hints for a single `(Document, View)` combo.
@@ -728,6 +740,48 @@ impl Document {
             color_swatches: None,
             color_swatch_controller: TaskController::new(),
             syn_loader,
+            document_type: DocumentType::File,
         }
     }
 
+    pub fn refactor(
+        text: Rope,
+        matches: HashMap<PathBuf, Vec<(usize, String)>>,
+        line_map: HashMap<(PathBuf, usize), usize>,
+        encoding: Option<&'static encoding::Encoding>,
+        config: Arc<dyn DynAccess<Config>>,
+    ) -> Self {
+        let encoding = encoding.unwrap_or(encoding::UTF_8);
+        let changes = ChangeSet::new(&text);
+        let old_state = None;
+
+        Self {
+            id: DocumentId::default(),
+            path: None,
+            encoding,
+            text,
+            selections: HashMap::default(),
+            inlay_hints: HashMap::default(),
+            inlay_hints_oudated: false,
+            indent_style: DEFAULT_INDENT,
+            line_ending: DEFAULT_LINE_ENDING,
+            restore_cursor: false,
+            syntax: None,
+            language: None,
+            changes,
+            old_state,
+            diagnostics: Vec::new(),
+            version: 0,
+            history: Cell::new(History::default()),
+            savepoints: Vec::new(),
+            last_saved_time: SystemTime::now(),
+            last_saved_revision: 0,
+            modified_since_accessed: false,
+            language_server: None,
+            diff_handle: None,
+            config,
+            version_control_head: None,
+            document_type: DocumentType::Refactor { matches, line_map },
+        }
+    }
+
@@ -1730,6 +1784,8 @@ impl Document {
 
     /// If there are unsaved modifications.
     pub fn is_modified(&self) -> bool {
+        match self.document_type {
+            DocumentType::File => {
         let history = self.history.take();
         let current_revision = history.current_revision();
         self.history.set(history);
@@ -1741,6 +1797,12 @@ impl Document {
         );
         current_revision != self.last_saved_revision || !self.changes.is_empty()
+            }
+            DocumentType::Refactor {
+                matches: _,
+                line_map: _,
+            } => false,
+        }
     }
 
     /// Save modifications to history, and so [`Self::is_modified`] will return false.
     pub fn reset_modified(&mut self) {
@@ -1746,7 +1746,7 @@ impl Editor {
         id
     }
 
-    fn new_file_from_document(&mut self, action: Action, doc: Document) -> DocumentId {
+    pub fn new_file_from_document(&mut self, action: Action, doc: Document) -> DocumentId {
        let id = self.new_document(doc);
        self.switch(id, action);
        id