mirror of https://github.com/helix-editor/helix
Compare commits
Comparing 843a3b76a7...e23a3f35a2 (17 commits):

e23a3f35a2
fed3edcab7
4099465632
9100bce9aa
f5dc8245ea
362e97e927
74a4c613e8
6113359b8f
ccbaadda45
42a70b2f35
8fba25bb86
e2768a8b44
b21e6748d1
fc7955094d
e37265e16f
68d7b5cda1
9dacf06fb0
@@ -2810,9 +2810,9 @@ checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801"

 [[package]]
 name = "tree-house"
-version = "0.2.0"
+version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "679e3296e503901cd9f6e116be5a43a9270222215bf6c78b4b1f4af5c3dcc62d"
+checksum = "d00ea55222392f171ae004dd13b62edd09d995633abf0c13406a8df3547fb999"
 dependencies = [
  "arc-swap",
  "hashbrown 0.15.4",
@@ -37,7 +37,7 @@ package.helix-tui.opt-level = 2
 package.helix-term.opt-level = 2

 [workspace.dependencies]
-tree-house = { version = "0.2.0", default-features = false }
+tree-house = { version = "0.3.0", default-features = false }
 nucleo = "0.5.0"
 slotmap = "1.0.7"
 thiserror = "2.0"
@@ -80,6 +80,8 @@
 | `search_selection_detect_word_boundaries` | Use current selection as the search pattern, automatically wrapping with `\b` on word boundaries | normal: `` * ``, select: `` * `` |
 | `make_search_word_bounded` | Modify current search to make it word bounded | |
 | `global_search` | Global search in workspace folder | normal: `` <space>/ ``, select: `` <space>/ `` |
+| `local_search_grep` | Local search in buffer | normal: `` <space>l ``, select: `` <space>l `` |
+| `local_search_fuzzy` | Fuzzy local search in buffer | normal: `` <space>L ``, select: `` <space>L `` |
 | `extend_line` | Select current line, if already selected, extend to another line based on the anchor | |
 | `extend_line_below` | Select current line, if already selected, extend to next line | normal: `` x ``, select: `` x `` |
 | `extend_line_above` | Select current line, if already selected, extend to previous line | |
@@ -135,7 +135,9 @@ pub trait RopeSliceExt<'a>: Sized {
     /// let graphemes: Vec<_> = text.graphemes().collect();
     /// assert_eq!(graphemes.as_slice(), &["😶🌫️", "🏴☠️", "🖼️"]);
     /// ```
-    fn graphemes(self) -> RopeGraphemes<'a>;
+    fn graphemes(self) -> RopeGraphemes<'a> {
+        self.graphemes_at(0)
+    }
     /// Returns an iterator over the grapheme clusters in the slice, reversed.
     ///
     /// The returned iterator starts at the end of the slice and ends at the beginning of the
@@ -150,7 +152,127 @@ pub trait RopeSliceExt<'a>: Sized {
     /// let graphemes: Vec<_> = text.graphemes_rev().collect();
     /// assert_eq!(graphemes.as_slice(), &["🖼️", "🏴☠️", "😶🌫️"]);
     /// ```
-    fn graphemes_rev(self) -> RevRopeGraphemes<'a>;
+    fn graphemes_rev(self) -> RopeGraphemes<'a>;
+    /// Returns an iterator over the grapheme clusters in the slice at the given byte index.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use ropey::Rope;
+    /// # use helix_stdx::rope::RopeSliceExt;
+    /// let text = Rope::from_str("😶🌫️🏴☠️🖼️");
+    /// // 14 is the byte index of the pirate flag's starting cluster boundary.
+    /// let graphemes: Vec<_> = text.slice(..).graphemes_at(14).collect();
+    /// assert_eq!(graphemes.as_slice(), &["🏴☠️", "🖼️"]);
+    /// // 27 is the byte index of the pirate flag's ending cluster boundary.
+    /// let graphemes: Vec<_> = text.slice(..).graphemes_at(27).reversed().collect();
+    /// assert_eq!(graphemes.as_slice(), &["🏴☠️", "😶🌫️"]);
+    /// ```
+    fn graphemes_at(self, byte_idx: usize) -> RopeGraphemes<'a>;
+    /// Returns an iterator over the grapheme clusters in a rope and the byte index where each
+    /// grapheme cluster starts.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use ropey::Rope;
+    /// # use helix_stdx::rope::RopeSliceExt;
+    /// let text = Rope::from_str("😶🌫️🏴☠️🖼️");
+    /// let slice = text.slice(..);
+    /// let graphemes: Vec<_> = slice.grapheme_indices_at(0).collect();
+    /// assert_eq!(
+    ///     graphemes.as_slice(),
+    ///     &[(0, "😶🌫️".into()), (14, "🏴☠️".into()), (27, "🖼️".into())]
+    /// );
+    /// let graphemes: Vec<_> = slice.grapheme_indices_at(slice.len_bytes()).reversed().collect();
+    /// assert_eq!(
+    ///     graphemes.as_slice(),
+    ///     &[(27, "🖼️".into()), (14, "🏴☠️".into()), (0, "😶🌫️".into())]
+    /// );
+    /// ```
+    fn grapheme_indices_at(self, byte_idx: usize) -> RopeGraphemeIndices<'a>;
+    /// Finds the byte index of the next grapheme boundary after `byte_idx`.
+    ///
+    /// If the byte index lies on the last grapheme cluster in the slice then this function
+    /// returns `RopeSlice::len_bytes`.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use ropey::Rope;
+    /// # use helix_stdx::rope::RopeSliceExt;
+    /// let text = Rope::from_str("😶🌫️🏴☠️🖼️");
+    /// let slice = text.slice(..);
+    /// let mut byte_idx = 0;
+    /// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("😶🌫️".into()));
+    /// byte_idx = slice.next_grapheme_boundary(byte_idx);
+    /// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("🏴☠️".into()));
+    ///
+    /// // If `byte_idx` does not lie on a character or grapheme boundary then this function is
+    /// // functionally the same as `ceil_grapheme_boundary`.
+    /// assert_eq!(slice.next_grapheme_boundary(byte_idx - 1), byte_idx);
+    /// assert_eq!(slice.next_grapheme_boundary(byte_idx - 2), byte_idx);
+    /// assert_eq!(slice.next_grapheme_boundary(byte_idx + 1), slice.next_grapheme_boundary(byte_idx));
+    /// assert_eq!(slice.next_grapheme_boundary(byte_idx + 2), slice.next_grapheme_boundary(byte_idx));
+    ///
+    /// byte_idx = slice.next_grapheme_boundary(byte_idx);
+    /// assert_eq!(slice.graphemes_at(byte_idx).next(), Some("🖼️".into()));
+    /// byte_idx = slice.next_grapheme_boundary(byte_idx);
+    /// assert_eq!(slice.graphemes_at(byte_idx).next(), None);
+    /// assert_eq!(byte_idx, slice.len_bytes());
+    /// ```
+    fn next_grapheme_boundary(self, byte_idx: usize) -> usize {
+        self.nth_next_grapheme_boundary(byte_idx, 1)
+    }
+    /// Finds the byte index of the `n`th grapheme cluster after the given `byte_idx`.
+    ///
+    /// If there are fewer than `n` grapheme clusters after `byte_idx` in the rope then this
+    /// function returns `RopeSlice::len_bytes`.
+    ///
+    /// This is functionally equivalent to calling `next_grapheme_boundary` `n` times but is more
+    /// efficient.
+    fn nth_next_grapheme_boundary(self, byte_idx: usize, n: usize) -> usize;
+    /// Finds the byte index of the previous grapheme boundary before `byte_idx`.
+    ///
+    /// If the byte index lies on the first grapheme cluster in the slice then this function
+    /// returns zero.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// # use ropey::Rope;
+    /// # use helix_stdx::rope::RopeSliceExt;
+    /// let text = Rope::from_str("😶🌫️🏴☠️🖼️");
+    /// let slice = text.slice(..);
+    /// let mut byte_idx = text.len_bytes();
+    /// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("🖼️".into()));
+    /// byte_idx = slice.prev_grapheme_boundary(byte_idx);
+    /// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("🏴☠️".into()));
+    ///
+    /// // If `byte_idx` does not lie on a character or grapheme boundary then this function is
+    /// // functionally the same as `floor_grapheme_boundary`.
+    /// assert_eq!(slice.prev_grapheme_boundary(byte_idx + 1), byte_idx);
+    /// assert_eq!(slice.prev_grapheme_boundary(byte_idx + 2), byte_idx);
+    /// assert_eq!(slice.prev_grapheme_boundary(byte_idx - 1), slice.prev_grapheme_boundary(byte_idx));
+    /// assert_eq!(slice.prev_grapheme_boundary(byte_idx - 2), slice.prev_grapheme_boundary(byte_idx));
+    ///
+    /// byte_idx = slice.prev_grapheme_boundary(byte_idx);
+    /// assert_eq!(slice.graphemes_at(byte_idx).prev(), Some("😶🌫️".into()));
+    /// byte_idx = slice.prev_grapheme_boundary(byte_idx);
+    /// assert_eq!(slice.graphemes_at(byte_idx).prev(), None);
+    /// assert_eq!(byte_idx, 0);
+    /// ```
+    fn prev_grapheme_boundary(self, byte_idx: usize) -> usize {
+        self.nth_prev_grapheme_boundary(byte_idx, 1)
+    }
+    /// Finds the byte index of the `n`th grapheme cluster before the given `byte_idx`.
+    ///
+    /// If there are fewer than `n` grapheme clusters before `byte_idx` in the rope then this
+    /// function returns zero.
+    ///
+    /// This is functionally equivalent to calling `prev_grapheme_boundary` `n` times but is more
+    /// efficient.
+    fn nth_prev_grapheme_boundary(self, byte_idx: usize, n: usize) -> usize;
 }

 impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
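Below is a minimal usage sketch, not part of the diff, showing how the trait methods declared in this hunk compose. It assumes the `helix_stdx::rope::RopeSliceExt` trait exactly as declared above and reuses the sample string from the doctests.

```rust
use helix_stdx::rope::RopeSliceExt;
use ropey::Rope;

fn main() {
    let text = Rope::from_str("😶🌫️🏴☠️🖼️");
    let slice = text.slice(..);

    // Walk every grapheme cluster with its starting byte index, front to back...
    for (idx, grapheme) in slice.grapheme_indices_at(0) {
        println!("{idx}: {grapheme}");
    }

    // ...or back to front, starting the cursor at the end of the slice.
    for (idx, grapheme) in slice.grapheme_indices_at(slice.len_bytes()).reversed() {
        println!("{idx}: {grapheme}");
    }
}
```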
@@ -335,31 +457,111 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
         }
     }

-    fn graphemes(self) -> RopeGraphemes<'a> {
-        let mut chunks = self.chunks();
-        let first_chunk = chunks.next().unwrap_or("");
-        RopeGraphemes {
-            text: self,
-            chunks,
-            cur_chunk: first_chunk,
-            cur_chunk_start: 0,
-            cursor: GraphemeCursor::new(0, self.len_bytes(), true),
-        }
-    }
-
-    fn graphemes_rev(self) -> RevRopeGraphemes<'a> {
-        let (mut chunks, mut cur_chunk_start, _, _) = self.chunks_at_byte(self.len_bytes());
-        chunks.reverse();
-        let first_chunk = chunks.next().unwrap_or("");
-        cur_chunk_start -= first_chunk.len();
-        RevRopeGraphemes {
-            text: self,
-            chunks,
-            cur_chunk: first_chunk,
-            cur_chunk_start,
-            cursor: GraphemeCursor::new(self.len_bytes(), self.len_bytes(), true),
-        }
-    }
+    fn graphemes_rev(self) -> RopeGraphemes<'a> {
+        self.graphemes_at(self.len_bytes()).reversed()
+    }
+
+    fn graphemes_at(self, byte_idx: usize) -> RopeGraphemes<'a> {
+        // Bounds check
+        assert!(byte_idx <= self.len_bytes());
+
+        let (mut chunks, chunk_byte_idx, _, _) = self.chunks_at_byte(byte_idx);
+        let current_chunk = chunks.next().unwrap_or("");
+
+        RopeGraphemes {
+            text: self,
+            chunks,
+            current_chunk,
+            chunk_byte_idx,
+            cursor: GraphemeCursor::new(byte_idx, self.len_bytes(), true),
+            is_reversed: false,
+        }
+    }
+
+    fn grapheme_indices_at(self, byte_idx: usize) -> RopeGraphemeIndices<'a> {
+        // Bounds check
+        assert!(byte_idx <= self.len_bytes());
+
+        RopeGraphemeIndices {
+            front_offset: byte_idx,
+            iter: self.graphemes_at(byte_idx),
+            is_reversed: false,
+        }
+    }
+
+    fn nth_next_grapheme_boundary(self, mut byte_idx: usize, n: usize) -> usize {
+        // Bounds check
+        assert!(byte_idx <= self.len_bytes());
+
+        byte_idx = self.floor_char_boundary(byte_idx);
+
+        // Get the chunk with our byte index in it.
+        let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);
+
+        // Set up the grapheme cursor.
+        let mut gc = GraphemeCursor::new(byte_idx, self.len_bytes(), true);
+
+        // Find the nth next grapheme cluster boundary.
+        for _ in 0..n {
+            loop {
+                match gc.next_boundary(chunk, chunk_byte_idx) {
+                    Ok(None) => return self.len_bytes(),
+                    Ok(Some(boundary)) => {
+                        byte_idx = boundary;
+                        break;
+                    }
+                    Err(GraphemeIncomplete::NextChunk) => {
+                        chunk_byte_idx += chunk.len();
+                        let (a, _, _, _) = self.chunk_at_byte(chunk_byte_idx);
+                        chunk = a;
+                    }
+                    Err(GraphemeIncomplete::PreContext(n)) => {
+                        let ctx_chunk = self.chunk_at_byte(n - 1).0;
+                        gc.provide_context(ctx_chunk, n - ctx_chunk.len());
+                    }
+                    _ => unreachable!(),
+                }
+            }
+        }
+
+        byte_idx
+    }
+
+    fn nth_prev_grapheme_boundary(self, mut byte_idx: usize, n: usize) -> usize {
+        // Bounds check
+        assert!(byte_idx <= self.len_bytes());
+
+        byte_idx = self.ceil_char_boundary(byte_idx);
+
+        // Get the chunk with our byte index in it.
+        let (mut chunk, mut chunk_byte_idx, _, _) = self.chunk_at_byte(byte_idx);
+
+        // Set up the grapheme cursor.
+        let mut gc = GraphemeCursor::new(byte_idx, self.len_bytes(), true);
+
+        for _ in 0..n {
+            loop {
+                match gc.prev_boundary(chunk, chunk_byte_idx) {
+                    Ok(None) => return 0,
+                    Ok(Some(boundary)) => {
+                        byte_idx = boundary;
+                        break;
+                    }
+                    Err(GraphemeIncomplete::PrevChunk) => {
+                        let (a, b, _, _) = self.chunk_at_byte(chunk_byte_idx - 1);
+                        chunk = a;
+                        chunk_byte_idx = b;
+                    }
+                    Err(GraphemeIncomplete::PreContext(n)) => {
+                        let ctx_chunk = self.chunk_at_byte(n - 1).0;
+                        gc.provide_context(ctx_chunk, n - ctx_chunk.len());
+                    }
+                    _ => unreachable!(),
+                }
+            }
+        }
+
+        byte_idx
+    }
 }

 // copied from std
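A small sketch, again not part of the diff, of the equivalence the documentation above promises: `nth_next_grapheme_boundary(idx, n)` should agree with applying `next_grapheme_boundary` n times, only without re-seeking the chunk and `GraphemeCursor` on every step. It assumes the same sample string as the doctests.

```rust
use helix_stdx::rope::RopeSliceExt;
use ropey::Rope;

fn main() {
    let text = Rope::from_str("😶🌫️🏴☠️🖼️");
    let slice = text.slice(..);

    // Apply the single-step helper twice...
    let mut idx = 0;
    for _ in 0..2 {
        idx = slice.next_grapheme_boundary(idx);
    }

    // ...and compare with the batched form; both walks clamp at len_bytes() / 0.
    assert_eq!(idx, slice.nth_next_grapheme_boundary(0, 2));
    assert_eq!(slice.nth_prev_grapheme_boundary(idx, 2), 0);
}
```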
@@ -370,13 +572,19 @@ const fn is_utf8_char_boundary(b: u8) -> bool {
 }

 /// An iterator over the graphemes of a `RopeSlice`.
+///
+/// This iterator is cursor-like: rather than implementing DoubleEndedIterator it can be reversed
+/// like a cursor. This style matches `Bytes` and `Chars` iterator types in Ropey and is more
+/// natural and useful for wrapping `GraphemeCursor`.
 #[derive(Clone)]
 pub struct RopeGraphemes<'a> {
     text: RopeSlice<'a>,
     chunks: Chunks<'a>,
-    cur_chunk: &'a str,
-    cur_chunk_start: usize,
+    current_chunk: &'a str,
+    /// Byte index of the start of the current chunk.
+    chunk_byte_idx: usize,
     cursor: GraphemeCursor,
+    is_reversed: bool,
 }

 impl fmt::Debug for RopeGraphemes<'_> {
@@ -384,112 +592,178 @@ impl fmt::Debug for RopeGraphemes<'_> {
         f.debug_struct("RopeGraphemes")
             .field("text", &self.text)
             .field("chunks", &self.chunks)
-            .field("cur_chunk", &self.cur_chunk)
-            .field("cur_chunk_start", &self.cur_chunk_start)
+            .field("current_chunk", &self.current_chunk)
+            .field("chunk_byte_idx", &self.chunk_byte_idx)
             // .field("cursor", &self.cursor)
+            .field("is_reversed", &self.is_reversed)
             .finish()
     }
 }

+impl<'a> RopeGraphemes<'a> {
+    #[allow(clippy::should_implement_trait)]
+    pub fn next(&mut self) -> Option<RopeSlice<'a>> {
+        if self.is_reversed {
+            self.prev_impl()
+        } else {
+            self.next_impl()
+        }
+    }
+
+    pub fn prev(&mut self) -> Option<RopeSlice<'a>> {
+        if self.is_reversed {
+            self.next_impl()
+        } else {
+            self.prev_impl()
+        }
+    }
+
+    pub fn reverse(&mut self) {
+        self.is_reversed = !self.is_reversed;
+    }
+
+    #[must_use]
+    pub fn reversed(mut self) -> Self {
+        self.reverse();
+        self
+    }
+
+    fn next_impl(&mut self) -> Option<RopeSlice<'a>> {
+        let a = self.cursor.cur_cursor();
+        let b;
+        loop {
+            match self
+                .cursor
+                .next_boundary(self.current_chunk, self.chunk_byte_idx)
+            {
+                Ok(None) => return None,
+                Ok(Some(boundary)) => {
+                    b = boundary;
+                    break;
+                }
+                Err(GraphemeIncomplete::NextChunk) => {
+                    self.chunk_byte_idx += self.current_chunk.len();
+                    self.current_chunk = self.chunks.next().unwrap_or("");
+                }
+                Err(GraphemeIncomplete::PreContext(idx)) => {
+                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
+                    self.cursor.provide_context(chunk, byte_idx);
+                }
+                _ => unreachable!(),
+            }
+        }
+
+        if a < self.chunk_byte_idx {
+            Some(self.text.byte_slice(a..b))
+        } else {
+            let a2 = a - self.chunk_byte_idx;
+            let b2 = b - self.chunk_byte_idx;
+            Some((&self.current_chunk[a2..b2]).into())
+        }
+    }
+
+    fn prev_impl(&mut self) -> Option<RopeSlice<'a>> {
+        let a = self.cursor.cur_cursor();
+        let b;
+        loop {
+            match self
+                .cursor
+                .prev_boundary(self.current_chunk, self.chunk_byte_idx)
+            {
+                Ok(None) => return None,
+                Ok(Some(boundary)) => {
+                    b = boundary;
+                    break;
+                }
+                Err(GraphemeIncomplete::PrevChunk) => {
+                    self.current_chunk = self.chunks.prev().unwrap_or("");
+                    self.chunk_byte_idx -= self.current_chunk.len();
+                }
+                Err(GraphemeIncomplete::PreContext(idx)) => {
+                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
+                    self.cursor.provide_context(chunk, byte_idx);
+                }
+                _ => unreachable!(),
+            }
+        }
+
+        if a >= self.chunk_byte_idx + self.current_chunk.len() {
+            Some(self.text.byte_slice(b..a))
+        } else {
+            let a2 = a - self.chunk_byte_idx;
+            let b2 = b - self.chunk_byte_idx;
+            Some((&self.current_chunk[b2..a2]).into())
+        }
+    }
+}
+
 impl<'a> Iterator for RopeGraphemes<'a> {
     type Item = RopeSlice<'a>;

     fn next(&mut self) -> Option<Self::Item> {
-        let a = self.cursor.cur_cursor();
-        let b;
-        loop {
-            match self
-                .cursor
-                .next_boundary(self.cur_chunk, self.cur_chunk_start)
-            {
-                Ok(None) => {
-                    return None;
-                }
-                Ok(Some(n)) => {
-                    b = n;
-                    break;
-                }
-                Err(GraphemeIncomplete::NextChunk) => {
-                    self.cur_chunk_start += self.cur_chunk.len();
-                    self.cur_chunk = self.chunks.next().unwrap_or("");
-                }
-                Err(GraphemeIncomplete::PreContext(idx)) => {
-                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
-                    self.cursor.provide_context(chunk, byte_idx);
-                }
-                _ => unreachable!(),
-            }
-        }
-
-        if a < self.cur_chunk_start {
-            Some(self.text.byte_slice(a..b))
-        } else {
-            let a2 = a - self.cur_chunk_start;
-            let b2 = b - self.cur_chunk_start;
-            Some((&self.cur_chunk[a2..b2]).into())
-        }
+        RopeGraphemes::next(self)
     }
 }

-/// An iterator over the graphemes of a `RopeSlice` in reverse.
-#[derive(Clone)]
-pub struct RevRopeGraphemes<'a> {
-    text: RopeSlice<'a>,
-    chunks: Chunks<'a>,
-    cur_chunk: &'a str,
-    cur_chunk_start: usize,
-    cursor: GraphemeCursor,
-}
-
-impl fmt::Debug for RevRopeGraphemes<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.debug_struct("RevRopeGraphemes")
-            .field("text", &self.text)
-            .field("chunks", &self.chunks)
-            .field("cur_chunk", &self.cur_chunk)
-            .field("cur_chunk_start", &self.cur_chunk_start)
-            // .field("cursor", &self.cursor)
-            .finish()
-    }
-}
-
-impl<'a> Iterator for RevRopeGraphemes<'a> {
-    type Item = RopeSlice<'a>;
-
-    fn next(&mut self) -> Option<Self::Item> {
-        let a = self.cursor.cur_cursor();
-        let b;
-        loop {
-            match self
-                .cursor
-                .prev_boundary(self.cur_chunk, self.cur_chunk_start)
-            {
-                Ok(None) => {
-                    return None;
-                }
-                Ok(Some(n)) => {
-                    b = n;
-                    break;
-                }
-                Err(GraphemeIncomplete::PrevChunk) => {
-                    self.cur_chunk = self.chunks.next().unwrap_or("");
-                    self.cur_chunk_start -= self.cur_chunk.len();
-                }
-                Err(GraphemeIncomplete::PreContext(idx)) => {
-                    let (chunk, byte_idx, _, _) = self.text.chunk_at_byte(idx.saturating_sub(1));
-                    self.cursor.provide_context(chunk, byte_idx);
-                }
-                _ => unreachable!(),
-            }
-        }
-
-        if a >= self.cur_chunk_start + self.cur_chunk.len() {
-            Some(self.text.byte_slice(b..a))
-        } else {
-            let a2 = a - self.cur_chunk_start;
-            let b2 = b - self.cur_chunk_start;
-            Some((&self.cur_chunk[b2..a2]).into())
-        }
+/// An iterator over the grapheme clusters in a rope and the byte indices where each grapheme
+/// cluster starts.
+///
+/// This iterator wraps `RopeGraphemes` and is also cursor-like. Use `reverse` or `reversed` to
+/// toggle the direction of the iterator. See [RopeGraphemes].
+#[derive(Debug, Clone)]
+pub struct RopeGraphemeIndices<'a> {
+    front_offset: usize,
+    iter: RopeGraphemes<'a>,
+    is_reversed: bool,
+}
+
+impl<'a> RopeGraphemeIndices<'a> {
+    #[allow(clippy::should_implement_trait)]
+    pub fn next(&mut self) -> Option<(usize, RopeSlice<'a>)> {
+        if self.is_reversed {
+            self.prev_impl()
+        } else {
+            self.next_impl()
+        }
+    }
+
+    pub fn prev(&mut self) -> Option<(usize, RopeSlice<'a>)> {
+        if self.is_reversed {
+            self.next_impl()
+        } else {
+            self.prev_impl()
+        }
+    }
+
+    pub fn reverse(&mut self) {
+        self.is_reversed = !self.is_reversed;
+    }
+
+    #[must_use]
+    pub fn reversed(mut self) -> Self {
+        self.reverse();
+        self
+    }
+
+    fn next_impl(&mut self) -> Option<(usize, RopeSlice<'a>)> {
+        let slice = self.iter.next()?;
+        let idx = self.front_offset;
+        self.front_offset += slice.len_bytes();
+        Some((idx, slice))
+    }
+
+    fn prev_impl(&mut self) -> Option<(usize, RopeSlice<'a>)> {
+        let slice = self.iter.prev()?;
+        self.front_offset -= slice.len_bytes();
+        Some((self.front_offset, slice))
+    }
+}
+
+impl<'a> Iterator for RopeGraphemeIndices<'a> {
+    type Item = (usize, RopeSlice<'a>);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        RopeGraphemeIndices::next(self)
     }
 }
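The doc comment in this hunk motivates the cursor-like design. A brief sketch, assuming the API introduced above, of what that buys compared to `DoubleEndedIterator`: `prev` retraces the step `next` just took, and `reversed` flips the same cursor rather than consuming from the other end.

```rust
use helix_stdx::rope::RopeSliceExt;
use ropey::Rope;

fn main() {
    let text = Rope::from_str("😶🌫️🏴☠️🖼️");
    let slice = text.slice(..);

    let mut graphemes = slice.graphemes_at(0);
    let first = graphemes.next(); // yields the first cluster, cursor moves right
    let again = graphemes.prev(); // cursor moves back over the same cluster
    assert_eq!(first, again);

    // `reversed` flips the direction of the same cursor, so iteration simply
    // resumes from wherever the cursor currently is.
    let mut rev = slice.graphemes_at(slice.len_bytes()).reversed();
    let last = rev.next(); // last cluster in the slice
    assert!(last.is_some());
}
```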
@@ -378,6 +378,8 @@ impl MappableCommand {
         search_selection_detect_word_boundaries, "Use current selection as the search pattern, automatically wrapping with `\b` on word boundaries",
         make_search_word_bounded, "Modify current search to make it word bounded",
         global_search, "Global search in workspace folder",
+        local_search_grep, "Local search in buffer",
+        local_search_fuzzy, "Fuzzy local search in buffer",
         extend_line, "Select current line, if already selected, extend to another line based on the anchor",
         extend_line_below, "Select current line, if already selected, extend to next line",
         extend_line_above, "Select current line, if already selected, extend to previous line",
@@ -2668,6 +2670,395 @@ fn global_search(cx: &mut Context) {
     cx.push_layer(Box::new(overlaid(picker)));
 }

+/// Local grep search in buffer
+fn local_search_grep(cx: &mut Context) {
+    #[derive(Debug)]
+    struct FileResult {
+        path: PathBuf,
+        line_num: usize,
+        line_content: String,
+    }
+
+    impl FileResult {
+        fn new(path: &Path, line_num: usize, line_content: String) -> Self {
+            Self {
+                path: path.to_path_buf(),
+                line_num,
+                line_content,
+            }
+        }
+    }
+
+    struct LocalSearchConfig {
+        smart_case: bool,
+        file_picker_config: helix_view::editor::FilePickerConfig,
+        number_style: Style,
+    }
+
+    let editor_config = cx.editor.config();
+    let config = LocalSearchConfig {
+        smart_case: editor_config.search.smart_case,
+        file_picker_config: editor_config.file_picker.clone(),
+        number_style: cx.editor.theme.get("constant.numeric.integer"),
+    };
+
+    let columns = [
+        PickerColumn::new("line", |item: &FileResult, config: &LocalSearchConfig| {
+            let line_num = (item.line_num + 1).to_string();
+            // files can never contain more than 99_999_999 lines
+            // thus using maximum line length to be 8 for this formatter is valid
+            let max_line_num_length = 8;
+            // whitespace padding to align results after the line number
+            let padding_length = max_line_num_length - line_num.len();
+            let padding = " ".repeat(padding_length);
+            // create column value to be displayed in the picker
+            Cell::from(Spans::from(vec![
+                Span::styled(line_num, config.number_style),
+                Span::raw(padding),
+            ]))
+        }),
+        PickerColumn::new("", |item: &FileResult, _config: &LocalSearchConfig| {
+            // extract line content to be displayed in the picker
+            // create column value to be displayed in the picker
+            Cell::from(Spans::from(vec![Span::raw(&item.line_content)]))
+        }),
+    ];
+
+    let get_files = |query: &str,
+                     editor: &mut Editor,
+                     config: std::sync::Arc<LocalSearchConfig>,
+                     injector: &ui::picker::Injector<_, _>| {
+        if query.is_empty() {
+            return async { Ok(()) }.boxed();
+        }
+
+        let search_root = helix_stdx::env::current_working_dir();
+        if !search_root.exists() {
+            return async { Err(anyhow::anyhow!("Current working directory does not exist")) }
+                .boxed();
+        }
+
+        // Only read the current document (not other documents opened in the buffer)
+        let doc = doc!(editor);
+        let documents = vec![(doc.path().cloned(), doc.text().to_owned())];
+
+        let matcher = match RegexMatcherBuilder::new()
+            .case_smart(config.smart_case)
+            .build(query)
+        {
+            Ok(matcher) => {
+                // Clear any "Failed to compile regex" errors out of the statusline.
+                editor.clear_status();
+                matcher
+            }
+            Err(err) => {
+                log::info!("Failed to compile search pattern in global search: {}", err);
+                return async { Err(anyhow::anyhow!("Failed to compile regex")) }.boxed();
+            }
+        };
+
+        let dedup_symlinks = config.file_picker_config.deduplicate_links;
+        let absolute_root = search_root
+            .canonicalize()
+            .unwrap_or_else(|_| search_root.clone());
+
+        let injector = injector.clone();
+        async move {
+            let searcher = SearcherBuilder::new()
+                .binary_detection(BinaryDetection::quit(b'\x00'))
+                .build();
+            WalkBuilder::new(search_root)
+                .hidden(config.file_picker_config.hidden)
+                .parents(config.file_picker_config.parents)
+                .ignore(config.file_picker_config.ignore)
+                .follow_links(config.file_picker_config.follow_symlinks)
+                .git_ignore(config.file_picker_config.git_ignore)
+                .git_global(config.file_picker_config.git_global)
+                .git_exclude(config.file_picker_config.git_exclude)
+                .max_depth(config.file_picker_config.max_depth)
+                .filter_entry(move |entry| {
+                    filter_picker_entry(entry, &absolute_root, dedup_symlinks)
+                })
+                .add_custom_ignore_filename(helix_loader::config_dir().join("ignore"))
+                .add_custom_ignore_filename(".helix/ignore")
+                .build_parallel()
+                .run(|| {
+                    let mut searcher = searcher.clone();
+                    let matcher = matcher.clone();
+                    let injector = injector.clone();
+                    let documents = &documents;
+                    Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
+                        let entry = match entry {
+                            Ok(entry) => entry,
+                            Err(_) => return WalkState::Continue,
+                        };
+
+                        match entry.file_type() {
+                            Some(entry) if entry.is_file() => {}
+                            // skip everything else
+                            _ => return WalkState::Continue,
+                        };
+
+                        let mut stop = false;
+
+                        // Maximum line length of the content displayed within the result picker.
+                        // User should be allowed to control this to accomodate their monitor width.
+                        // TODO: Expose this setting to the user so they can control it.
+                        let local_search_result_line_length = 80;
+
+                        let sink = sinks::UTF8(|line_num, line_content| {
+                            stop = injector
+                                .push(FileResult::new(
+                                    entry.path(),
+                                    line_num as usize - 1,
+                                    line_content[0..std::cmp::min(
+                                        local_search_result_line_length,
+                                        line_content.len(),
+                                    )]
+                                    .to_string(),
+                                ))
+                                .is_err();
+
+                            Ok(!stop)
+                        });
+                        let doc = documents.iter().find(|&(doc_path, _)| {
+                            doc_path
+                                .as_ref()
+                                .is_some_and(|doc_path| doc_path == entry.path())
+                        });
+
+                        // search in current document
+                        let result = if let Some((_, doc)) = doc {
+                            // there is already a buffer for this file
+                            // search the buffer instead of the file because it's faster
+                            // and captures new edits without requiring a save
+                            if searcher.multi_line_with_matcher(&matcher) {
+                                // in this case a continuous buffer is required
+                                // convert the rope to a string
+                                let text = doc.to_string();
+                                searcher.search_slice(&matcher, text.as_bytes(), sink)
+                            } else {
+                                searcher.search_reader(
+                                    &matcher,
+                                    RopeReader::new(doc.slice(..)),
+                                    sink,
+                                )
+                            }
+                        } else {
+                            // Note: This is a hack!
+                            // We ignore all other files.
+                            // We only search an empty string (to satisfy rust's return type).
+                            searcher.search_slice(&matcher, "".to_owned().as_bytes(), sink)
+                        };
+
+                        if let Err(err) = result {
+                            log::error!("Local search error: {}, {}", entry.path().display(), err);
+                        }
+                        if stop {
+                            WalkState::Quit
+                        } else {
+                            WalkState::Continue
+                        }
+                    })
+                });
+            Ok(())
+        }
+        .boxed()
+    };
+
+    let reg = cx.register.unwrap_or('/');
+    cx.editor.registers.last_search_register = reg;
+
+    let picker = Picker::new(
+        columns,
+        1, // contents
+        [],
+        config,
+        move |cx, FileResult { path, line_num, .. }, action| {
+            let doc = match cx.editor.open(path, action) {
+                Ok(id) => doc_mut!(cx.editor, &id),
+                Err(e) => {
+                    cx.editor
+                        .set_error(format!("Failed to open file '{}': {}", path.display(), e));
+                    return;
+                }
+            };
+
+            let line_num = *line_num;
+            let view = view_mut!(cx.editor);
+            let text = doc.text();
+            if line_num >= text.len_lines() {
+                cx.editor.set_error(
+                    "The line you jumped to does not exist anymore because the file has changed.",
+                );
+                return;
+            }
+            let start = text.line_to_char(line_num);
+            let end = text.line_to_char((line_num + 1).min(text.len_lines()));
+
+            doc.set_selection(view.id, Selection::single(start, end));
+            if action.align_view(view, doc.id()) {
+                align_view(doc, view, Align::Center);
+            }
+        },
+    )
+    .with_preview(|_editor, FileResult { path, line_num, .. }| {
+        Some((path.as_path().into(), Some((*line_num, *line_num))))
+    })
+    .with_history_register(Some(reg))
+    .with_dynamic_query(get_files, Some(275));
+
+    cx.push_layer(Box::new(overlaid(picker)));
+}
+
+fn local_search_fuzzy(cx: &mut Context) {
+    #[derive(Debug)]
+    struct FileResult {
+        path: std::sync::Arc<PathBuf>,
+        line_num: usize,
+        file_contents_byte_start: usize,
+        file_contents_byte_end: usize,
+    }
+
+    struct LocalSearchData {
+        file_contents: String,
+        number_style: Style,
+    }
+
+    let current_document = doc!(cx.editor);
+    let Some(current_document_path) = current_document.path() else {
+        cx.editor.set_error("Failed to get current document path");
+        return;
+    };
+
+    let file_contents = std::fs::read_to_string(current_document_path).unwrap();
+
+    let current_document_path = std::sync::Arc::new(current_document_path.clone());
+
+    let file_results: Vec<FileResult> = file_contents
+        .lines()
+        .enumerate()
+        .filter_map(|(line_num, line)| {
+            if !line.trim().is_empty() {
+                // SAFETY: The offsets will be used to index back into the original `file_contents` String
+                // as a byte slice. Since the `file_contents` will be moved into the `Picker` as part of
+                // `editor_data`, we know that the `Picker` will take ownership of the underlying String,
+                // so it will be valid for displaying the `Span` as long as the user uses the `Picker`
+                // (the `Picker` gets dropped only when a new `Picker` is created). Furthermore, the
+                // process of reconstructing a `&str` back requires that we have access to the original
+                // `String` anyways so we can index into it, as is the case when we construct the `Span`
+                // when creating the `PickerColumn`s, so we know that we are returning the correct
+                // substring from the original `file_contents`.
+                // In fact, since we only store offsets, and accessing them from safe rust, there is
+                // no risk of memory safety (like our &str not living long enough). The only real
+                // bug would be moving out the original underlying `String` (which we obviously
+                // don't do). This would lead to an out of bounds crash in the `PickerColumn` function
+                // call, or a crash when we recreate back the &str if the new underlying `String`
+                // makes it so that our byte offsets index into the middle of a Unicode grapheme cluster.
+                // Last but not least, it could make it so that we do display the lines correctly,
+                // but these are from a different underlying `String` than the original, which would be
+                // different from the lines in the current buffer.
+                let beg =
+                    unsafe { line.as_ptr().byte_offset_from(file_contents.as_ptr()) } as usize;
+                let end = beg + line.len();
+                let result = FileResult {
+                    path: current_document_path.clone(),
+                    line_num,
+                    file_contents_byte_start: beg,
+                    file_contents_byte_end: end,
+                };
+                Some(result)
+            } else {
+                None
+            }
+        })
+        .collect();
+
+    let config = LocalSearchData {
+        number_style: cx.editor.theme.get("constant.numeric.integer"),
+        file_contents,
+    };
+
+    let columns = [
+        PickerColumn::new("line", |item: &FileResult, config: &LocalSearchData| {
+            let line_num = (item.line_num + 1).to_string();
+            // files can never contain more than 99_999_999 lines
+            // thus using maximum line length to be 8 for this formatter is valid
+            let max_line_num_length = 8;
+            // whitespace padding to align results after the line number
+            let padding_length = max_line_num_length - line_num.len();
+            let padding = " ".repeat(padding_length);
+            // create column value to be displayed in the picker
+            Cell::from(Spans::from(vec![
+                Span::styled(line_num, config.number_style),
+                Span::raw(padding),
+            ]))
+        }),
+        PickerColumn::new("", |item: &FileResult, config: &LocalSearchData| {
+            // extract line content to be displayed in the picker
+            let slice = &config.file_contents.as_bytes()
+                [item.file_contents_byte_start..item.file_contents_byte_end];
+            let content = std::str::from_utf8(slice).unwrap();
+            // create column value to be displayed in the picker
+            Cell::from(Spans::from(vec![Span::raw(content)]))
+        }),
+    ];
+
+    let reg = cx.register.unwrap_or('/');
+    cx.editor.registers.last_search_register = reg;
+
+    let picker = Picker::new(
+        columns,
+        1, // contents
+        [],
+        config,
+        move |cx, FileResult { path, line_num, .. }, action| {
+            let doc = match cx.editor.open(path, action) {
+                Ok(id) => doc_mut!(cx.editor, &id),
+                Err(e) => {
+                    cx.editor
+                        .set_error(format!("Failed to open file '{}': {}", path.display(), e));
+                    return;
+                }
+            };
+
+            let line_num = *line_num;
+            let view = view_mut!(cx.editor);
+            let text = doc.text();
+            if line_num >= text.len_lines() {
+                cx.editor.set_error(
+                    "The line you jumped to does not exist anymore because the file has changed.",
+                );
+                return;
+            }
+            let start = text.line_to_char(line_num);
+            let end = text.line_to_char((line_num + 1).min(text.len_lines()));
+
+            doc.set_selection(view.id, Selection::single(start, end));
+            if action.align_view(view, doc.id()) {
+                align_view(doc, view, Align::Center);
+            }
+        },
+    )
+    .with_preview(|_editor, FileResult { path, line_num, .. }| {
+        Some((path.as_path().into(), Some((*line_num, *line_num))))
+    })
+    .with_history_register(Some(reg));
+
+    let injector = picker.injector();
+    let timeout = std::time::Instant::now() + std::time::Duration::from_millis(30);
+    for file_result in file_results {
+        if injector.push(file_result).is_err() {
+            break;
+        }
+        if std::time::Instant::now() >= timeout {
+            break;
+        }
+    }
+
+    cx.push_layer(Box::new(overlaid(picker)));
+}
+
 enum Extend {
     Above,
     Below,
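The long SAFETY comment in `local_search_fuzzy` above justifies storing byte offsets into `file_contents`. A standalone sketch of the same trick, illustrative only and not helix code (the helper name `line_byte_ranges` is made up): `str::lines` yields subslices of the original string, so pointer arithmetic recovers each line's byte range for later re-slicing of that same string.

```rust
/// Collect (start, end) byte ranges of the non-empty lines of `file_contents`.
/// The ranges stay valid for as long as the string itself is kept alive unchanged.
fn line_byte_ranges(file_contents: &str) -> Vec<(usize, usize)> {
    file_contents
        .lines()
        .filter(|line| !line.trim().is_empty())
        .map(|line| {
            // Each `line` is a subslice of `file_contents`, so the distance between
            // the two pointers is the line's starting byte offset.
            let beg = line.as_ptr() as usize - file_contents.as_ptr() as usize;
            (beg, beg + line.len())
        })
        .collect()
}

fn main() {
    let contents = String::from("alpha\n\nbeta\n");
    for (beg, end) in line_byte_ranges(&contents) {
        // Re-slicing the same String with the stored offsets gives back the line.
        println!("{}..{}: {}", beg, end, &contents[beg..end]);
    }
}
```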
@@ -283,6 +283,8 @@ pub fn default() -> HashMap<Mode, KeyTrie> {
         "P" => paste_clipboard_before,
         "R" => replace_selections_with_clipboard,
         "/" => global_search,
+        "l" => local_search_grep,
+        "L" => local_search_fuzzy,
         "k" => hover,
         "r" => rename_symbol,
         "h" => select_references_to_symbol_under_cursor,
@@ -65,7 +65,7 @@
 ] @punctuation

 (string_value) @string
-((color_value) "#") @string.special
+(color_value "#" @string.special)
 (color_value) @string.special

 (integer_value) @constant.numeric.integer