Add handling of token separators in mbe

Edwin Cheng 2019-04-24 23:01:32 +08:00
parent dfab545d5d
commit 299d97b6d9
8 changed files with 219 additions and 226 deletions
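The change teaches the mbe crate to handle repetition separators longer than one character (as well as ident and literal separators). As a point of reference, here is a minimal standalone macro of the kind this enables, mirroring the test_match_group_with_multichar_sep test added below; this sketch is ordinary macro_rules code written for illustration, not part of the commit:

// Repetition separated by the multi-character punct `&&`
// (same shape as test_match_group_with_multichar_sep below).
macro_rules! foo {
    (fn $name:ident { $($i:literal)* }) => ( fn $name() -> bool { $($i)&&* } );
}

fn main() {
    foo!(fn baz { true true });
    assert!(baz()); // expands to `fn baz() -> bool { true && true }`
}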

Cargo.lock generated

@@ -1063,6 +1063,7 @@ dependencies = [
"ra_syntax 0.1.0",
"ra_tt 0.1.0",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"smallvec 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]


@@ -10,3 +10,4 @@ ra_parser = { path = "../ra_parser" }
tt = { path = "../ra_tt", package = "ra_tt" }
itertools = "0.8.0"
rustc-hash = "1.0.0"
smallvec = "0.6.9"


@@ -24,6 +24,7 @@ mod subtree_source;
mod subtree_parser;
use ra_syntax::SmolStr;
use smallvec::SmallVec;
pub use tt::{Delimiter, Punct};
@@ -98,11 +99,18 @@ pub(crate) struct Subtree {
pub(crate) token_trees: Vec<TokenTree>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum Separator {
Literal(tt::Literal),
Ident(tt::Ident),
Puncts(SmallVec<[tt::Punct; 3]>),
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Repeat {
pub(crate) subtree: Subtree,
pub(crate) kind: RepeatKind,
pub(crate) separator: Option<char>,
pub(crate) separator: Option<Separator>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
@@ -175,8 +183,8 @@ impl_froms!(TokenTree: Leaf, Subtree);
let expansion = rules.expand(&invocation_tt).unwrap();
assert_eq!(
expansion.to_string(),
"impl From < Leaf > for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree :: Leaf (it)}} \
impl From < Subtree > for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree :: Subtree (it)}}"
"impl From <Leaf > for TokenTree {fn from (it : Leaf) -> TokenTree {TokenTree ::Leaf (it)}} \
impl From <Subtree > for TokenTree {fn from (it : Subtree) -> TokenTree {TokenTree ::Subtree (it)}}"
)
}
@@ -384,7 +392,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
"#,
);
assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ; bar ()}");
assert_expansion(&rules, "foo! { foo, bar }", "fn baz {foo () ;bar ()}");
}
#[test]
@@ -416,6 +424,18 @@ impl_froms!(TokenTree: Leaf, Subtree);
assert_expansion(&rules, "foo! {fn baz {a b} }", "fn baz () {a () ; b () ;}");
}
#[test]
fn test_match_group_with_multichar_sep() {
let rules = create_rules(
r#"
macro_rules! foo {
(fn $name:ident {$($i:literal)*} ) => ( fn $name() -> bool { $($i)&&*} );
}"#,
);
assert_expansion(&rules, "foo! (fn baz {true true} )", "fn baz () -> bool {true &&true}");
}
#[test]
fn test_expand_to_item_list() {
let rules = create_rules(
@@ -597,7 +617,7 @@ MACRO_ITEMS@[0; 40)
assert_expansion(
&rules,
"foo! { bar::<u8>::baz::<u8> }",
"fn foo () {let a = bar :: < u8 > :: baz :: < u8 > ;}",
"fn foo () {let a = bar ::< u8 >:: baz ::< u8 > ;}",
);
}
@@ -891,7 +911,7 @@ MACRO_ITEMS@[0; 40)
}
"#,
);
assert_expansion(&rules, r#"foo!{'a}"#, r#"struct Ref < 'a > {s : & 'a str}"#);
assert_expansion(&rules, r#"foo!{'a}"#, r#"struct Ref <'a > {s : &'a str}"#);
}
#[test]
@@ -1063,7 +1083,7 @@ macro_rules! int_base {
);
assert_expansion(&rules, r#" int_base!{Binary for isize as usize -> Binary}"#,
"# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt :: Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}"
"# [stable (feature = \"rust1\" , since = \"1.0.0\")] impl fmt ::Binary for isize {fn fmt (& self , f : & mut fmt :: Formatter < \'_ >) -> fmt :: Result {Binary . fmt_int (* self as usize , f)}}"
);
}
@@ -1140,186 +1160,4 @@ impl_fn_for_zst ! {
assert_expansion(&rules, r#"impl_nonzero_fmt ! { # [ stable ( feature = "nonzero" , since = "1.28.0" ) ] ( Debug , Display , Binary , Octal , LowerHex , UpperHex ) for NonZeroU8 }"#,
"fn foo () {}");
}
#[test]
fn test_tuple_impls() {
// from https://github.com/rust-lang/rust/blob/316a391dcb7d66dc25f1f9a4ec9d368ef7615005/src/libcore/num/mod.rs#L12
let rules = create_rules(
r#"
macro_rules! tuple_impls {
($(
$Tuple:ident {
$(($idx:tt) -> $T:ident)+
}
)+) => {
$(
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($T:PartialEq),+> PartialEq for ($($T,)+) where last_type!($($T,)+): ?Sized {
#[inline]
fn eq(&self, other: &($($T,)+)) -> bool {
$(self.$idx == other.$idx)&&+
}
#[inline]
fn ne(&self, other: &($($T,)+)) -> bool {
$(self.$idx != other.$idx)||+
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($T:Eq),+> Eq for ($($T,)+) where last_type!($($T,)+): ?Sized {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($T:PartialOrd + PartialEq),+> PartialOrd for ($($T,)+)
where last_type!($($T,)+): ?Sized {
#[inline]
fn partial_cmp(&self, other: &($($T,)+)) -> Option<Ordering> {
lexical_partial_cmp!($(self.$idx, other.$idx),+)
}
#[inline]
fn lt(&self, other: &($($T,)+)) -> bool {
lexical_ord!(lt, $(self.$idx, other.$idx),+)
}
#[inline]
fn le(&self, other: &($($T,)+)) -> bool {
lexical_ord!(le, $(self.$idx, other.$idx),+)
}
#[inline]
fn ge(&self, other: &($($T,)+)) -> bool {
lexical_ord!(ge, $(self.$idx, other.$idx),+)
}
#[inline]
fn gt(&self, other: &($($T,)+)) -> bool {
lexical_ord!(gt, $(self.$idx, other.$idx),+)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($T:Ord),+> Ord for ($($T,)+) where last_type!($($T,)+): ?Sized {
#[inline]
fn cmp(&self, other: &($($T,)+)) -> Ordering {
lexical_cmp!($(self.$idx, other.$idx),+)
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<$($T:Default),+> Default for ($($T,)+) {
#[inline]
fn default() -> ($($T,)+) {
($({ let x: $T = Default::default(); x},)+)
}
}
)+
}
}"#,
);
assert_expansion(
&rules,
r#"tuple_impls ! {
Tuple1 {
( 0 ) -> A
}
Tuple2 {
( 0 ) -> A
( 1 ) -> B
}
Tuple3 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
}
Tuple4 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
( 3 ) -> D
}
Tuple5 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
( 3 ) -> D
( 4 ) -> E
}
Tuple6 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
( 3 ) -> D
( 4 ) -> E
( 5 ) -> F
}
Tuple7 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
( 3 ) -> D
( 4 ) -> E
( 5 ) -> F
( 6 ) -> G
}
Tuple8 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
( 3 ) -> D
( 4 ) -> E
( 5 ) -> F
( 6 ) -> G
( 7 ) -> H
}
Tuple9 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
( 3 ) -> D
( 4 ) -> E
( 5 ) -> F
( 6 ) -> G
( 7 ) -> H
( 8 ) -> I
}
Tuple10 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
( 3 ) -> D
( 4 ) -> E
( 5 ) -> F
( 6 ) -> G
( 7 ) -> H
( 8 ) -> I
( 9 ) -> J
}
Tuple11 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
( 3 ) -> D
( 4 ) -> E
( 5 ) -> F
( 6 ) -> G
( 7 ) -> H
( 8 ) -> I
( 9 ) -> J
( 10 ) -> K
}
Tuple12 {
( 0 ) -> A
( 1 ) -> B
( 2 ) -> C
( 3 ) -> D
( 4 ) -> E
( 5 ) -> F
( 6 ) -> G
( 7 ) -> H
( 8 ) -> I
( 9 ) -> J
( 10 ) -> K
( 11 ) -> L
}
}"#,
"fn foo () {}",
);
}
}
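For illustration, here is a self-contained sketch of the idea behind the new Separator enum above, using stand-in types rather than the real tt and smallvec types: a separator is now an ident, a literal, or a short run of punct characters instead of a single char.

// Stand-in types for illustration only (not the real `tt`/`smallvec` types).
#[derive(Debug, Clone, PartialEq)]
struct Punct { ch: char, joint: bool }

#[derive(Debug, Clone, PartialEq)]
enum Separator {
    Literal(String),
    Ident(String),
    Puncts(Vec<Punct>),
}

fn main() {
    // `,` stays a one-punct separator; `&&` becomes two joint puncts.
    let comma = Separator::Puncts(vec![Punct { ch: ',', joint: false }]);
    let and_and = Separator::Puncts(vec![
        Punct { ch: '&', joint: true },
        Punct { ch: '&', joint: false },
    ]);
    println!("{:?}\n{:?}", comma, and_and);
}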


@@ -196,6 +196,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
"literal" => {
let literal =
input.eat_literal().ok_or(ExpandError::UnexpectedToken)?.clone();
res.inner.insert(
text.clone(),
Binding::Simple(tt::Leaf::from(literal).into()),
@@ -210,7 +211,7 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
}
}
crate::Leaf::Punct(punct) => {
if input.eat_punct() != Some(punct) {
if !input.eat_punct().map(|p| p.char == punct.char).unwrap_or(false) {
return Err(ExpandError::UnexpectedToken);
}
}
@@ -246,8 +247,23 @@ fn match_lhs(pattern: &crate::Subtree, input: &mut TtCursor) -> Result<Bindings,
}
}
if let Some(separator) = *separator {
if input.eat_punct().map(|p| p.char) != Some(separator) {
if let Some(separator) = separator {
use crate::Separator::*;
if !input
.eat_seperator()
.map(|sep| match (sep, separator) {
(Ident(ref a), Ident(ref b)) => a.text == b.text,
(Literal(ref a), Literal(ref b)) => a.text == b.text,
(Puncts(ref a), Puncts(ref b)) if a.len() == b.len() => {
let a_iter = a.iter().map(|a| a.char);
let b_iter = b.iter().map(|b| b.char);
a_iter.eq(b_iter)
}
_ => false,
})
.unwrap_or(false)
{
input.rollback(memento);
break;
}
@@ -328,7 +344,7 @@ fn expand_tt(
// Dirty hack to make macro-expansion terminate.
// This should be replaced by a propper macro-by-example implementation
let mut limit = 128;
let mut has_sep = false;
let mut has_seps = 0;
while let Ok(t) = expand_subtree(&repeat.subtree, bindings, nesting) {
limit -= 1;
@@ -339,15 +355,28 @@
nesting.push(idx + 1);
token_trees.push(reduce_single_token(t).into());
if let Some(sep) = repeat.separator {
let punct =
tt::Leaf::from(tt::Punct { char: sep, spacing: tt::Spacing::Alone });
token_trees.push(punct.into());
has_sep = true;
if let Some(ref sep) = repeat.separator {
match sep {
crate::Separator::Ident(ident) => {
has_seps = 1;
token_trees.push(tt::Leaf::from(ident.clone()).into());
}
crate::Separator::Literal(lit) => {
has_seps = 1;
token_trees.push(tt::Leaf::from(lit.clone()).into());
}
crate::Separator::Puncts(puncts) => {
has_seps = puncts.len();
for punct in puncts {
token_trees.push(tt::Leaf::from(*punct).into());
}
}
}
}
}
nesting.pop().unwrap();
if has_sep {
for _ in 0..has_seps {
token_trees.pop();
}
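The expansion side pushes the separator token(s) after every repeated element and finally drops the trailing run, which is why the has_sep flag became the has_seps count. A small standalone sketch of that bookkeeping over plain strings (illustrative only, not the crate's API):

// Join elements with a possibly multi-token separator by pushing the separator
// after each element and popping the trailing copy, mirroring `has_seps` above.
fn join_with_sep(elements: &[&str], sep: &[&str]) -> Vec<String> {
    let mut out = Vec::new();
    let mut has_seps = 0;
    for e in elements {
        out.push(e.to_string());
        has_seps = sep.len();
        for s in sep {
            out.push(s.to_string());
        }
    }
    for _ in 0..has_seps {
        out.pop();
    }
    out
}

fn main() {
    // Two `&` puncts act as the `&&` separator between the two `true` tokens.
    assert_eq!(join_with_sep(&["true", "true"], &["&", "&"]), ["true", "&", "&", "true"]);
}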


@@ -74,18 +74,11 @@ fn parse_var(p: &mut TtCursor, transcriber: bool) -> Result<crate::Var, ParseErr
Ok(crate::Var { text, kind })
}
fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, ParseError> {
let subtree = p.eat_subtree().unwrap();
let mut subtree = parse_subtree(subtree, transcriber)?;
subtree.delimiter = crate::Delimiter::None;
let sep = p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?;
let (separator, rep) = match sep.char {
'*' | '+' | '?' => (None, sep.char),
char => {
(Some(char), p.eat_punct().ok_or(ParseError::Expected(String::from("separator")))?.char)
}
};
fn mk_repeat(
rep: char,
subtree: crate::Subtree,
separator: Option<crate::Separator>,
) -> Result<crate::Repeat, ParseError> {
let kind = match rep {
'*' => crate::RepeatKind::ZeroOrMore,
'+' => crate::RepeatKind::OneOrMore,
@@ -95,6 +88,27 @@ fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, Pa
Ok(crate::Repeat { subtree, kind, separator })
}
fn parse_repeat(p: &mut TtCursor, transcriber: bool) -> Result<crate::Repeat, ParseError> {
let subtree = p.eat_subtree().unwrap();
let mut subtree = parse_subtree(subtree, transcriber)?;
subtree.delimiter = crate::Delimiter::None;
if let Some(rep) = p.at_punct() {
match rep.char {
'*' | '+' | '?' => {
p.bump();
return mk_repeat(rep.char, subtree, None);
}
_ => {}
}
}
let sep = p.eat_seperator().ok_or(ParseError::Expected(String::from("separator")))?;
let rep = p.eat_punct().ok_or(ParseError::Expected(String::from("repeat")))?;
mk_repeat(rep.char, subtree, Some(sep))
}
#[cfg(test)]
mod tests {
use ra_syntax::{ast, AstNode};
@@ -109,7 +123,7 @@ mod tests {
is_valid("($i:ident) => ()");
expect_err("$i:ident => ()", "subtree");
expect_err("($i:ident) ()", "`=`");
expect_err("($($i:ident)_) => ()", "separator");
expect_err("($($i:ident)_) => ()", "repeat");
}
fn expect_err(macro_body: &str, expected: &str) {
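With this parser change, the separator slot of a repetition is read by eat_seperator and may be an ident, a literal, or a run of puncts, each mapping to one Separator variant; only `*`, `+` and `?` are still rejected there because they are the repeat operators. A sketch of matcher shapes this corresponds to, written as ordinary macro_rules definitions (only the `&&` case is actually exercised by the tests in this commit):

// Separator shapes corresponding to the Separator variants (illustrative only):
macro_rules! separator_shapes {
    ($($a:ident),*) => {};      // Separator::Puncts: single `,`
    ($($b:ident)&&*) => {};     // Separator::Puncts: multi-char `&&`
    ($($c:ident)sep*) => {};    // Separator::Ident: `sep`
    ($($d:ident)0*) => {};      // Separator::Literal: `0`
    ($($e:ident)*) => {};       // no separator
}

fn main() {}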


@@ -342,7 +342,7 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> {
}
}
struct TokenPeek<'a, I>
pub(crate) struct TokenPeek<'a, I>
where
I: Iterator<Item = &'a tt::TokenTree>,
{
@@ -365,7 +365,7 @@ where
TokenPeek { iter: itertools::multipeek(iter) }
}
fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> {
pub fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> {
if p.spacing != tt::Spacing::Joint {
return None;
}
@@ -375,7 +375,7 @@ where
Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint))
}
fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> {
pub fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> {
self.current_punct2(p).and_then(|((p0, p1), last_joint)| {
if !last_joint {
None
@@ -437,12 +437,16 @@ fn convert_delim(d: tt::Delimiter, closing: bool) -> TtToken {
}
fn convert_literal(l: &tt::Literal) -> TtToken {
TtToken {
kind: classify_literal(&l.text).unwrap().kind,
is_joint_to_next: false,
text: l.text.clone(),
n_tokens: 1,
}
let kind = classify_literal(&l.text)
.map(|tkn| tkn.kind)
.or_else(|| match l.text.as_ref() {
"true" => Some(SyntaxKind::TRUE_KW),
"false" => Some(SyntaxKind::FALSE_KW),
_ => None,
})
.unwrap();
TtToken { kind, is_joint_to_next: false, text: l.text.clone(), n_tokens: 1 }
}
fn convert_ident(ident: &tt::Ident) -> TtToken {


@@ -133,7 +133,9 @@ fn convert_tt(
};
let mut token_trees = Vec::new();
for child in tt.children_with_tokens().skip(skip_first as usize) {
let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
while let Some(child) = child_iter.next() {
if (skip_first && (child == first_child || child == last_child)) || child.kind().is_trivia()
{
continue;
@@ -152,12 +154,25 @@ fn convert_tt(
prev = Some(char)
}
if let Some(char) = prev {
token_trees.push(
tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into(),
);
let spacing = match child_iter.peek() {
Some(SyntaxElement::Token(token)) => {
if token.kind().is_punct() {
tt::Spacing::Joint
} else {
tt::Spacing::Alone
}
}
_ => tt::Spacing::Alone,
};
token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
}
} else {
let child: tt::TokenTree = if token.kind().is_keyword()
let child: tt::TokenTree = if token.kind() == SyntaxKind::TRUE_KW
|| token.kind() == SyntaxKind::FALSE_KW
{
tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
} else if token.kind().is_keyword()
|| token.kind() == IDENT
|| token.kind() == LIFETIME
{
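This syntax_bridge change feeds the same feature: a punct now gets Joint spacing when the next element is also punctuation, so consecutive characters such as `&` `&` can later be recombined into one multi-char separator. A rough standalone sketch of that spacing decision (simplified: it looks at raw characters, while the real code peeks at the next syntax element):

#[derive(Debug, PartialEq)]
enum Spacing { Alone, Joint }

// Mark each punct Joint if the character after it is also punctuation.
fn spacings(puncts: &[char]) -> Vec<(char, Spacing)> {
    puncts
        .iter()
        .enumerate()
        .map(|(i, &c)| {
            let joint = puncts
                .get(i + 1)
                .map(|n| n.is_ascii_punctuation())
                .unwrap_or(false);
            (c, if joint { Spacing::Joint } else { Spacing::Alone })
        })
        .collect()
}

fn main() {
    // `&&` -> the first `&` is Joint to the second, the second is Alone.
    assert_eq!(
        spacings(&['&', '&']),
        vec![('&', Spacing::Joint), ('&', Spacing::Alone)]
    );
}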


@@ -1,5 +1,7 @@
use crate::ParseError;
use crate::subtree_parser::Parser;
use crate::subtree_source::TokenPeek;
use smallvec::{SmallVec, smallvec};
#[derive(Clone)]
pub(crate) struct TtCursor<'a> {
@@ -162,6 +164,95 @@ impl<'a> TtCursor<'a> {
}
}
fn eat_punct3(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
let sec = self.eat_punct()?.clone();
let third = self.eat_punct()?.clone();
Some(smallvec![p.clone(), sec, third])
}
fn eat_punct2(&mut self, p: &tt::Punct) -> Option<SmallVec<[tt::Punct; 3]>> {
let sec = self.eat_punct()?.clone();
Some(smallvec![p.clone(), sec])
}
fn eat_multi_char_punct<'b, I>(
&mut self,
p: &tt::Punct,
iter: &mut TokenPeek<'b, I>,
) -> Option<SmallVec<[tt::Punct; 3]>>
where
I: Iterator<Item = &'b tt::TokenTree>,
{
if let Some((m, _)) = iter.current_punct3(p) {
if let r @ Some(_) = match m {
('<', '<', '=') | ('>', '>', '=') | ('.', '.', '.') | ('.', '.', '=') => {
self.eat_punct3(p)
}
_ => None,
} {
return r;
}
}
if let Some((m, _)) = iter.current_punct2(p) {
if let r @ Some(_) = match m {
('<', '=')
| ('>', '=')
| ('+', '=')
| ('-', '=')
| ('|', '=')
| ('&', '=')
| ('^', '=')
| ('/', '=')
| ('*', '=')
| ('%', '=')
| ('&', '&')
| ('|', '|')
| ('<', '<')
| ('>', '>')
| ('-', '>')
| ('!', '=')
| ('=', '>')
| ('=', '=')
| ('.', '.')
| (':', ':') => self.eat_punct2(p),
_ => None,
} {
return r;
}
}
None
}
pub(crate) fn eat_seperator(&mut self) -> Option<crate::Separator> {
match self.eat()? {
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
Some(crate::Separator::Literal(lit.clone()))
}
tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
Some(crate::Separator::Ident(ident.clone()))
}
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
match punct.char {
'*' | '+' | '?' => return None,
_ => {}
};
// FIXME: The parser only handles some composable puncts here,
// but at this phase some puncts are still joint,
// so we bypass that check.
let mut peekable = TokenPeek::new(self.subtree.token_trees[self.pos..].iter());
let puncts = self.eat_multi_char_punct(punct, &mut peekable);
let puncts = puncts.unwrap_or_else(|| smallvec![punct.clone()]);
Some(crate::Separator::Puncts(puncts))
}
_ => None,
}
}
#[must_use]
pub(crate) fn save(&self) -> TtCursorMemento {
TtCursorMemento { pos: self.pos }
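eat_multi_char_punct above greedily tries a three-char composite, then a two-char one, before falling back to a single punct. A simplified standalone sketch of the same greedy recombination over plain characters, with the operator tables copied from the match arms in this commit (illustrative only, not the crate's API):

// Greedily merge consecutive punctuation chars into known 3- or 2-char operators.
fn recombine(chars: &[char]) -> Vec<String> {
    const THREE: &[&str] = &["<<=", ">>=", "...", "..="];
    const TWO: &[&str] = &[
        "<=", ">=", "+=", "-=", "|=", "&=", "^=", "/=", "*=", "%=",
        "&&", "||", "<<", ">>", "->", "!=", "=>", "==", "..", "::",
    ];
    let mut out = Vec::new();
    let mut i = 0;
    while i < chars.len() {
        let take3: String = chars[i..].iter().take(3).collect();
        let take2: String = chars[i..].iter().take(2).collect();
        if take3.len() == 3 && THREE.contains(&take3.as_str()) {
            out.push(take3);
            i += 3;
        } else if take2.len() == 2 && TWO.contains(&take2.as_str()) {
            out.push(take2);
            i += 2;
        } else {
            out.push(chars[i].to_string());
            i += 1;
        }
    }
    out
}

fn main() {
    assert_eq!(recombine(&['&', '&']), ["&&"]);
    assert_eq!(recombine(&['.', '.', '=']), ["..="]);
}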