Mirror of https://github.com/rust-lang/rust-analyzer, synced 2024-12-26 04:53:34 +00:00
Refactor TokenBuffer to reduce cloning
parent f1ffd14922
commit af3d75ad2e
5 changed files with 99 additions and 53 deletions
@@ -58,7 +58,7 @@ impl ProcMacroExpander {
 }
 
 fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
-    if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = cursor.token_tree() {
+    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = cursor.token_tree() {
         if punct.char == c {
             *cursor = cursor.bump();
             return true;
@@ -68,7 +68,7 @@ fn eat_punct(cursor: &mut Cursor, c: char) -> bool {
 }
 
 fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
-    if let Some(tt::TokenTree::Subtree(subtree)) = cursor.token_tree() {
+    if let Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) = cursor.token_tree() {
         if Some(kind) == subtree.delimiter_kind() {
             *cursor = cursor.bump_subtree();
             return true;
@@ -78,7 +78,7 @@ fn eat_subtree(cursor: &mut Cursor, kind: tt::DelimiterKind) -> bool {
 }
 
 fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
-    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = cursor.token_tree() {
+    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) = cursor.token_tree() {
         if t == ident.text.as_str() {
             *cursor = cursor.bump();
             return true;
@@ -88,7 +88,7 @@ fn eat_ident(cursor: &mut Cursor, t: &str) -> bool {
 }
 
 fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
-    let buffer = TokenBuffer::new(&tt.token_trees);
+    let buffer = TokenBuffer::from_tokens(&tt.token_trees);
     let mut p = buffer.begin();
     let mut result = tt::Subtree::default();
 
@@ -106,7 +106,7 @@ fn remove_derive_attrs(tt: &tt::Subtree) -> Option<tt::Subtree> {
             }
         }
 
-        result.token_trees.push(curr.token_tree()?.clone());
+        result.token_trees.push(curr.token_tree()?.cloned());
         p = curr.bump();
     }
 
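The pattern in the eat_* helpers above is the visible API change of this commit: Cursor::token_tree() now hands back a borrowed tt::buffer::TokenTreeRef instead of a &tt::TokenTree, and the extra `_` field carries the original tree (for leaves) or its optional owner (for subtrees). A hypothetical helper in the same style, not part of this commit, could look like this:

// Hypothetical helper in the style of eat_punct/eat_ident (not from the
// commit): match on the borrowed TokenTreeRef and advance the cursor.
fn eat_literal(cursor: &mut Cursor) -> bool {
    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(_), _)) = cursor.token_tree() {
        *cursor = cursor.bump();
        return true;
    }
    false
}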
@@ -309,7 +309,7 @@ impl<'a> TtIter<'a> {
             }
         }
 
-        let buffer = TokenBuffer::new(&self.inner.as_slice());
+        let buffer = TokenBuffer::from_tokens(&self.inner.as_slice());
         let mut src = SubtreeTokenSource::new(&buffer);
         let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
 
@@ -336,11 +336,11 @@ impl<'a> TtIter<'a> {
            err = Some(err!("no tokens consumed"));
         }
         let res = match res.len() {
-            1 => Some(res[0].clone()),
+            1 => Some(res[0].cloned()),
             0 => None,
             _ => Some(tt::TokenTree::Subtree(tt::Subtree {
                 delimiter: None,
-                token_trees: res.into_iter().cloned().collect(),
+                token_trees: res.into_iter().map(|it| it.cloned()).collect(),
             })),
         };
         ExpandResult { value: res, err }
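With token_tree() returning TokenTreeRef, the accumulated `res` now holds borrows, and `.cloned()` is the single point where an owned tt::TokenTree is materialized. A minimal sketch of that pattern, assuming the tt crate as a dependency and not taken from this commit:

// Walk the top-level entries of a buffer and clone each one into an owned
// tt::TokenTree only at the point of collection.
fn collect_owned(mut cursor: tt::buffer::Cursor<'_>) -> Vec<tt::TokenTree> {
    let mut out = Vec::new();
    while let Some(tree) = cursor.token_tree() {
        out.push(tree.cloned()); // TokenTreeRef::cloned() -> tt::TokenTree
        cursor = cursor.bump();  // bump() moves to the next sibling entry
    }
    out
}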
@@ -53,10 +53,12 @@ impl<'a> SubtreeTokenSource<'a> {
         fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
             let tkn = c.token_tree();
 
-            if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
+            if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(punct), _)) = tkn {
                 if punct.char == '\'' {
                     let next = c.bump();
-                    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
+                    if let Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(ident), _)) =
+                        next.token_tree()
+                    {
                         let res_cursor = next.bump();
                         let text = SmolStr::new("'".to_string() + &ident.to_string());
 
@@ -94,11 +96,11 @@ impl<'a> SubtreeTokenSource<'a> {
             }
 
             match cursor.token_tree() {
-                Some(tt::TokenTree::Leaf(leaf)) => {
+                Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                     cached.push(Some(convert_leaf(&leaf)));
                     self.cached_cursor.set(cursor.bump());
                 }
-                Some(tt::TokenTree::Subtree(subtree)) => {
+                Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
                     self.cached_cursor.set(cursor.subtree().unwrap());
                     cached.push(Some(convert_delim(subtree.delimiter_kind(), false)));
                 }
@@ -70,15 +70,12 @@ pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
 ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
-    let tmp;
-    let tokens = match tt {
-        tt::Subtree { delimiter: None, token_trees } => token_trees.as_slice(),
-        _ => {
-            tmp = [tt.clone().into()];
-            &tmp[..]
+    let buffer = match tt {
+        tt::Subtree { delimiter: None, token_trees } => {
+            TokenBuffer::from_tokens(token_trees.as_slice())
         }
+        _ => TokenBuffer::from_subtree(tt),
     };
-    let buffer = TokenBuffer::new(&tokens);
     let mut token_source = SubtreeTokenSource::new(&buffer);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
     parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind);
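Before this change, the delimited case had to clone the whole subtree into a temporary `[tt.clone().into()]` just to obtain a slice to buffer; TokenBuffer::from_subtree walks it by reference instead. A standalone sketch of the call-site pattern above (an assumed helper, not code from the commit):

// Pick the cheaper constructor: a delimiter-less subtree is flattened into
// its children, anything else is buffered as a whole, with no up-front clone.
fn buffer_for(tt: &tt::Subtree) -> tt::buffer::TokenBuffer<'_> {
    match tt {
        tt::Subtree { delimiter: None, token_trees } => {
            tt::buffer::TokenBuffer::from_tokens(token_trees.as_slice())
        }
        _ => tt::buffer::TokenBuffer::from_subtree(tt),
    }
}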
@@ -631,7 +628,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         }
         last = self.cursor;
         let text: &str = match self.cursor.token_tree() {
-            Some(tt::TokenTree::Leaf(leaf)) => {
+            Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
                 // Mark the range if needed
                 let (text, id) = match leaf {
                     tt::Leaf::Ident(ident) => (&ident.text, ident.id),
@@ -650,7 +647,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 self.cursor = self.cursor.bump();
                 text
             }
-            Some(tt::TokenTree::Subtree(subtree)) => {
+            Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
                 self.cursor = self.cursor.subtree().unwrap();
                 if let Some(id) = subtree.delimiter.map(|it| it.id) {
                     self.open_delims.insert(id, self.text_pos);
@@ -684,8 +681,8 @@ impl<'a> TreeSink for TtTreeSink<'a> {
         // Add whitespace between adjoint puncts
         let next = last.bump();
         if let (
-            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))),
-            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))),
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
+            Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
         ) = (last.token_tree(), next.token_tree())
         {
             // Note: We always assume the semi-colon would be the last token in
@@ -744,7 +741,7 @@ mod tests {
         )
         .expand_tt("literals!(foo);");
         let tts = &[expansion.into()];
-        let buffer = tt::buffer::TokenBuffer::new(tts);
+        let buffer = tt::buffer::TokenBuffer::from_tokens(tts);
         let mut tt_src = SubtreeTokenSource::new(&buffer);
         let mut tokens = vec![];
         while tt_src.current().kind != EOF {
@@ -1,6 +1,6 @@
 //! FIXME: write short doc here
 
-use crate::{Subtree, TokenTree};
+use crate::{Leaf, Subtree, TokenTree};
 
 #[derive(Copy, Clone, Debug, Eq, PartialEq)]
 struct EntryId(usize);
@@ -13,7 +13,7 @@ struct EntryPtr(EntryId, usize);
 #[derive(Debug)]
 enum Entry<'t> {
     // Mimicking types from proc-macro.
-    Subtree(&'t TokenTree, EntryId),
+    Subtree(Option<&'t TokenTree>, &'t Subtree, EntryId),
     Leaf(&'t TokenTree),
     // End entries contain a pointer to the entry from the containing
     // token tree, or None if this is the outermost level.
|
@ -27,37 +27,64 @@ pub struct TokenBuffer<'t> {
|
||||||
buffers: Vec<Box<[Entry<'t>]>>,
|
buffers: Vec<Box<[Entry<'t>]>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'t> TokenBuffer<'t> {
|
trait TokenList<'a> {
|
||||||
pub fn new(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
|
fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>);
|
||||||
let mut buffers = vec![];
|
}
|
||||||
|
|
||||||
let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
|
impl<'a> TokenList<'a> for &'a [TokenTree] {
|
||||||
assert_eq!(idx, 0);
|
fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
|
||||||
|
|
||||||
TokenBuffer { buffers }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn new_inner(
|
|
||||||
tokens: &'t [TokenTree],
|
|
||||||
buffers: &mut Vec<Box<[Entry<'t>]>>,
|
|
||||||
next: Option<EntryPtr>,
|
|
||||||
) -> usize {
|
|
||||||
// Must contain everything in tokens and then the Entry::End
|
// Must contain everything in tokens and then the Entry::End
|
||||||
let start_capacity = tokens.len() + 1;
|
let start_capacity = self.len() + 1;
|
||||||
let mut entries = Vec::with_capacity(start_capacity);
|
let mut entries = Vec::with_capacity(start_capacity);
|
||||||
let mut children = vec![];
|
let mut children = vec![];
|
||||||
|
for (idx, tt) in self.iter().enumerate() {
|
||||||
for (idx, tt) in tokens.iter().enumerate() {
|
|
||||||
match tt {
|
match tt {
|
||||||
TokenTree::Leaf(_) => {
|
TokenTree::Leaf(_) => {
|
||||||
entries.push(Entry::Leaf(tt));
|
entries.push(Entry::Leaf(tt));
|
||||||
}
|
}
|
||||||
TokenTree::Subtree(subtree) => {
|
TokenTree::Subtree(subtree) => {
|
||||||
entries.push(Entry::End(None));
|
entries.push(Entry::End(None));
|
||||||
children.push((idx, (subtree, tt)));
|
children.push((idx, (subtree, Some(tt))));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
(children, entries)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> TokenList<'a> for &'a Subtree {
|
||||||
|
fn entries(&self) -> (Vec<(usize, (&'a Subtree, Option<&'a TokenTree>))>, Vec<Entry<'a>>) {
|
||||||
|
// Must contain everything in tokens and then the Entry::End
|
||||||
|
let mut entries = vec![];
|
||||||
|
let mut children = vec![];
|
||||||
|
entries.push(Entry::End(None));
|
||||||
|
children.push((0usize, (*self, None)));
|
||||||
|
(children, entries)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'t> TokenBuffer<'t> {
|
||||||
|
pub fn from_tokens(tokens: &'t [TokenTree]) -> TokenBuffer<'t> {
|
||||||
|
Self::new(tokens)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_subtree(subtree: &'t Subtree) -> TokenBuffer<'t> {
|
||||||
|
Self::new(subtree)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new<T: TokenList<'t>>(tokens: T) -> TokenBuffer<'t> {
|
||||||
|
let mut buffers = vec![];
|
||||||
|
let idx = TokenBuffer::new_inner(tokens, &mut buffers, None);
|
||||||
|
assert_eq!(idx, 0);
|
||||||
|
TokenBuffer { buffers }
|
||||||
|
}
|
||||||
|
|
||||||
|
fn new_inner<T: TokenList<'t>>(
|
||||||
|
tokens: T,
|
||||||
|
buffers: &mut Vec<Box<[Entry<'t>]>>,
|
||||||
|
next: Option<EntryPtr>,
|
||||||
|
) -> usize {
|
||||||
|
let (children, mut entries) = tokens.entries();
|
||||||
|
|
||||||
entries.push(Entry::End(next));
|
entries.push(Entry::End(next));
|
||||||
let res = buffers.len();
|
let res = buffers.len();
|
||||||
|
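The TokenList trait is what lets both entry points share new/new_inner: a &[TokenTree] seeds the root buffer with one entry per token, while a &Subtree seeds it with a single child subtree that has no owning TokenTree. A small check of that assumed behaviour, not part of the commit:

// With from_subtree, the first cursor position is the root subtree itself,
// and its owning TokenTree is None because the root was never wrapped in one.
fn root_is_unowned_subtree(tt: &tt::Subtree) -> bool {
    let buffer = tt::buffer::TokenBuffer::from_subtree(tt);
    matches!(
        buffer.begin().token_tree(),
        Some(tt::buffer::TokenTreeRef::Subtree(_, None))
    )
}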
@@ -65,11 +92,11 @@ impl<'t> TokenBuffer<'t> {
 
         for (child_idx, (subtree, tt)) in children {
             let idx = TokenBuffer::new_inner(
-                &subtree.token_trees,
+                subtree.token_trees.as_slice(),
                 buffers,
                 Some(EntryPtr(EntryId(res), child_idx + 1)),
             );
-            buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, EntryId(idx));
+            buffers[res].as_mut()[child_idx] = Entry::Subtree(tt, subtree, EntryId(idx));
         }
 
         res
@@ -87,6 +114,24 @@ impl<'t> TokenBuffer<'t> {
     }
 }
 
+#[derive(Debug)]
+pub enum TokenTreeRef<'a> {
+    Subtree(&'a Subtree, Option<&'a TokenTree>),
+    Leaf(&'a Leaf, &'a TokenTree),
+}
+
+impl<'a> TokenTreeRef<'a> {
+    pub fn cloned(&self) -> TokenTree {
+        match &self {
+            TokenTreeRef::Subtree(subtree, tt) => match tt {
+                Some(it) => (*it).clone(),
+                None => (*subtree).clone().into(),
+            },
+            TokenTreeRef::Leaf(_, tt) => (*tt).clone(),
+        }
+    }
+}
+
 /// A safe version of `Cursor` from `syn` crate https://github.com/dtolnay/syn/blob/6533607f91686545cb034d2838beea338d9d0742/src/buffer.rs#L125
 #[derive(Copy, Clone, Debug)]
 pub struct Cursor<'a> {
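TokenTreeRef keeps a reference to the original tt::TokenTree, always for leaves and optionally for subtrees, so cloned() can hand back the original tree with its ids intact; only the from_subtree root has to be re-wrapped via Into<TokenTree>. A hypothetical consumer matching on the new enum, not from the commit:

// Classify a borrowed token tree without cloning anything.
fn describe(tree: tt::buffer::TokenTreeRef<'_>) -> &'static str {
    match tree {
        tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Ident(_), _) => "ident",
        tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _) => "punct",
        tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Literal(_), _) => "literal",
        tt::buffer::TokenTreeRef::Subtree(..) => "subtree",
    }
}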
@@ -114,12 +159,11 @@ impl<'a> Cursor<'a> {
         match self.entry() {
             Some(Entry::End(Some(ptr))) => {
                 let idx = ptr.1;
-                if let Some(Entry::Subtree(TokenTree::Subtree(subtree), _)) =
+                if let Some(Entry::Subtree(_, subtree, _)) =
                     self.buffer.entry(&EntryPtr(ptr.0, idx - 1))
                 {
                     return Some(subtree);
                 }
-
                 None
             }
             _ => None,
@@ -134,7 +178,7 @@ impl<'a> Cursor<'a> {
     /// a cursor into that subtree
     pub fn subtree(self) -> Option<Cursor<'a>> {
         match self.entry() {
-            Some(Entry::Subtree(_, entry_id)) => {
+            Some(Entry::Subtree(_, _, entry_id)) => {
                 Some(Cursor::create(self.buffer, EntryPtr(*entry_id, 0)))
             }
             _ => None,
@@ -142,10 +186,13 @@ impl<'a> Cursor<'a> {
     }
 
     /// If the cursor is pointing at a `TokenTree`, returns it
-    pub fn token_tree(self) -> Option<&'a TokenTree> {
+    pub fn token_tree(self) -> Option<TokenTreeRef<'a>> {
         match self.entry() {
-            Some(Entry::Leaf(tt)) => Some(tt),
-            Some(Entry::Subtree(tt, _)) => Some(tt),
+            Some(Entry::Leaf(tt)) => match tt {
+                TokenTree::Leaf(leaf) => Some(TokenTreeRef::Leaf(leaf, *tt)),
+                TokenTree::Subtree(subtree) => Some(TokenTreeRef::Subtree(subtree, Some(tt))),
+            },
+            Some(Entry::Subtree(tt, subtree, _)) => Some(TokenTreeRef::Subtree(subtree, *tt)),
             Some(Entry::End(_)) => None,
             None => None,
         }
@@ -172,7 +219,7 @@ impl<'a> Cursor<'a> {
     /// a cursor into that subtree
     pub fn bump_subtree(self) -> Cursor<'a> {
         match self.entry() {
-            Some(Entry::Subtree(_, _)) => self.subtree().unwrap(),
+            Some(Entry::Subtree(_, _, _)) => self.subtree().unwrap(),
             _ => self.bump(),
         }
     }