Mirror of https://github.com/rust-lang/rust-analyzer, synced 2025-01-27 20:35:09 +00:00
Split LIFETIME to two tokens in mbe

parent f78de3bb95
commit da18f11307

3 changed files with 89 additions and 14 deletions
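
Before the diff itself, a minimal sketch of the representation change named in the commit title: at the token-tree level, a lifetime such as 'a is no longer carried as a single identifier token but as a joint ' punct followed by a plain identifier. The types below are simplified stand-ins written only for illustration (they are not the repository's real tt crate); just the shape of the split is taken from the diff.

// Illustration only: simplified stand-ins for the tt::Leaf / tt::Spacing shapes used in the diff.
#[derive(Debug, PartialEq)]
enum Spacing {
    Alone,
    Joint,
}

#[derive(Debug, PartialEq)]
enum Leaf {
    Punct { char: char, spacing: Spacing },
    Ident { text: String },
}

// Split a lifetime token ("'a") into the two leaves the commit produces:
// a `'` punct marked Joint, then the identifier text without the apostrophe.
fn split_lifetime(lifetime: &str) -> Option<[Leaf; 2]> {
    let rest = lifetime.strip_prefix('\'')?;
    Some([
        Leaf::Punct { char: '\'', spacing: Spacing::Joint },
        Leaf::Ident { text: rest.to_string() },
    ])
}

fn main() {
    let split = split_lifetime("'a").unwrap();
    assert_eq!(split[0], Leaf::Punct { char: '\'', spacing: Spacing::Joint });
    assert_eq!(split[1], Leaf::Ident { text: "a".to_string() });
}
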
@@ -202,6 +202,13 @@ impl<'a> TtIter<'a> {
     }
 
     pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
+        match self.peek_n(0) {
+            Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => {
+                return self.expect_lifetime();
+            }
+            _ => (),
+        }
+
         let tt = self.next().ok_or_else(|| ())?.clone();
         let punct = match tt {
             tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
@@ -255,13 +262,21 @@ impl<'a> TtIter<'a> {
         }
     }
 
-    pub(crate) fn expect_lifetime(&mut self) -> Result<&tt::Ident, ()> {
-        let ident = self.expect_ident()?;
-        // check if it start from "`"
-        if !ident.text.starts_with('\'') {
+    pub(crate) fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
+        let punct = self.expect_punct()?;
+        if punct.char != '\'' {
             return Err(());
         }
-        Ok(ident)
+        let ident = self.expect_ident()?;
+
+        Ok(tt::Subtree {
+            delimiter: None,
+            token_trees: vec![
+                tt::Leaf::Punct(punct.clone()).into(),
+                tt::Leaf::Ident(ident.clone()).into(),
+            ],
+        }
+        .into())
     }
 
     pub(crate) fn expect_fragment(
@@ -274,7 +289,10 @@ impl<'a> TtIter<'a> {
         }
 
         impl<'a> TreeSink for OffsetTokenSink<'a> {
-            fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
+            fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
+                if kind == SyntaxKind::LIFETIME {
+                    n_tokens = 2;
+                }
                 for _ in 0..n_tokens {
                     self.cursor = self.cursor.bump_subtree();
                 }
@@ -286,7 +304,7 @@ impl<'a> TtIter<'a> {
             }
         }
 
-        let buffer = TokenBuffer::new(self.inner.as_slice());
+        let buffer = TokenBuffer::new(&self.inner.as_slice());
         let mut src = SubtreeTokenSource::new(&buffer);
         let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
 
@@ -422,7 +440,7 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen
         "tt" => input.expect_tt().map(Some).map_err(|()| err!()),
         "lifetime" => input
             .expect_lifetime()
-            .map(|ident| Some(tt::Leaf::Ident(ident.clone()).into()))
+            .map(|tt| Some(tt))
             .map_err(|()| err!("expected lifetime")),
         "literal" => input
             .expect_literal()
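
The hunks above change the matcher side: expect_tt forwards to expect_lifetime when it peeks a ' punct, and expect_lifetime now consumes a punct plus an ident and returns them wrapped in a delimiter-less subtree. The stand-alone sketch below mirrors only that control flow; the iterator and types are hypothetical stand-ins, not the crate's TtIter.

// Hypothetical mirror of the expect_lifetime flow above; not the crate's real TtIter.
#[derive(Debug, PartialEq)]
enum Tok {
    Punct(char),
    Ident(String),
}

// Stands in for the returned tt::Subtree { delimiter: None, token_trees: [punct, ident] }.
#[derive(Debug, PartialEq)]
struct LifetimeSubtree {
    punct: char,
    ident: String,
}

fn expect_lifetime(iter: &mut std::slice::Iter<'_, Tok>) -> Result<LifetimeSubtree, ()> {
    // First token must be the apostrophe punct...
    let punct = match iter.next() {
        Some(Tok::Punct(c)) if *c == '\'' => *c,
        _ => return Err(()),
    };
    // ...followed by the identifier carrying the lifetime's name.
    let ident = match iter.next() {
        Some(Tok::Ident(text)) => text.clone(),
        _ => return Err(()),
    };
    Ok(LifetimeSubtree { punct, ident })
}

fn main() {
    let toks = vec![Tok::Punct('\''), Tok::Ident("static".into())];
    let mut iter = toks.iter();
    assert_eq!(
        expect_lifetime(&mut iter),
        Ok(LifetimeSubtree { punct: '\'', ident: "static".into() })
    );
}
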
@@ -50,6 +50,26 @@ impl<'a> SubtreeTokenSource<'a> {
     }
 
     fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
+        fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
+            let tkn = c.token_tree();
+
+            if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
+                if punct.char == '\'' {
+                    let next = c.bump();
+                    if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
+                        let res_cursor = next.bump();
+                        let text = SmolStr::new("'".to_string() + &ident.to_string());
+
+                        return Some((res_cursor, text));
+                    } else {
+                        panic!("Next token must be ident : {:#?}", next.token_tree());
+                    }
+                }
+            }
+
+            None
+        }
+
         if pos < self.cached.borrow().len() {
             return Ref::map(self.cached.borrow(), |c| &c[pos]);
         }
@@ -63,6 +83,12 @@ impl<'a> SubtreeTokenSource<'a> {
                 continue;
             }
 
+            if let Some((curr, text)) = is_lifetime(cursor) {
+                cached.push(Some(TtToken { kind: LIFETIME, is_joint_to_next: false, text }));
+                self.cached_cursor.set(curr);
+                continue;
+            }
+
             match cursor.token_tree() {
                 Some(tt::TokenTree::Leaf(leaf)) => {
                     cached.push(Some(convert_leaf(&leaf)));
@@ -152,7 +178,11 @@ fn convert_ident(ident: &tt::Ident) -> TtToken {
 }
 
 fn convert_punct(p: tt::Punct) -> TtToken {
-    let kind = SyntaxKind::from_char(p.char).unwrap();
+    let kind = match SyntaxKind::from_char(p.char) {
+        None => panic!("{:#?} is not a valid punct", p),
+        Some(kind) => kind,
+    };
+
     let text = {
         let mut buf = [0u8; 4];
         let s: &str = p.char.encode_utf8(&mut buf);
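
The SubtreeTokenSource hunks above go in the opposite direction: when the token trees are fed back to the parser, a ' punct followed by an identifier is re-glued into a single LIFETIME token whose text carries the apostrophe. A compact illustration of that re-joining on stand-in tokens (the Cursor/TtToken/SmolStr names from the diff are only mimicked here):

// Illustration of the is_lifetime re-joining above, on stand-in tokens.
#[derive(Debug, PartialEq)]
enum Tok {
    Punct(char),
    Ident(String),
}

// Looks at position `pos`; if a split lifetime starts there, returns the re-glued
// text ("'" + ident) plus the position just past the two consumed tokens.
fn rejoin_lifetime(toks: &[Tok], pos: usize) -> Option<(usize, String)> {
    match (toks.get(pos), toks.get(pos + 1)) {
        (Some(Tok::Punct('\'')), Some(Tok::Ident(name))) => Some((pos + 2, format!("'{}", name))),
        _ => None,
    }
}

fn main() {
    let toks = vec![Tok::Punct('\''), Tok::Ident("a".into()), Tok::Punct(',')];
    assert_eq!(rejoin_lifetime(&toks, 0), Some((2, "'a".to_string())));
    assert_eq!(rejoin_lifetime(&toks, 2), None);
}
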
@@ -271,7 +271,7 @@ struct RawConvertor<'a> {
     inner: std::slice::Iter<'a, RawToken>,
 }
 
-trait SrcToken {
+trait SrcToken: std::fmt::Debug {
     fn kind(&self) -> SyntaxKind;
 
     fn to_char(&self) -> Option<char>;
@@ -361,8 +361,12 @@ trait TokenConvertor {
                     Some(next) if next.kind().is_punct() => tt::Spacing::Joint,
                     _ => tt::Spacing::Alone,
                 };
-                let char = token.to_char().expect("Token from lexer must be single char");
+                let char = match token.to_char() {
+                    Some(c) => c,
+                    None => {
+                        panic!("Token from lexer must be single char: token = {:#?}", token);
+                    }
+                };
                 tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into()
             }
         } else {
@@ -373,9 +377,28 @@ trait TokenConvertor {
                 }
                 let leaf: tt::Leaf = match k {
                     T![true] | T![false] => make_leaf!(Literal),
-                    IDENT | LIFETIME => make_leaf!(Ident),
+                    IDENT => make_leaf!(Ident),
                     k if k.is_keyword() => make_leaf!(Ident),
                     k if k.is_literal() => make_leaf!(Literal),
+                    LIFETIME => {
+                        let char_unit = TextUnit::from_usize(1);
+                        let r = TextRange::offset_len(range.start(), char_unit);
+                        let apostrophe = tt::Leaf::from(tt::Punct {
+                            char: '\'',
+                            spacing: tt::Spacing::Joint,
+                            id: self.id_alloc().alloc(r),
+                        });
+                        result.push(apostrophe.into());
+
+                        let r =
+                            TextRange::offset_len(range.start() + char_unit, range.len() - char_unit);
+                        let ident = tt::Leaf::from(tt::Ident {
+                            text: SmolStr::new(&token.to_text()[1..]),
+                            id: self.id_alloc().alloc(r),
+                        });
+                        result.push(ident.into());
+                        return;
+                    }
                     _ => return,
                 };
 
@@ -455,6 +478,7 @@ impl Convertor {
     }
 }
 
+#[derive(Debug)]
 enum SynToken {
     Ordiniary(SyntaxToken),
     Punch(SyntaxToken, TextUnit),
@@ -592,11 +616,14 @@ fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
 }
 
 impl<'a> TreeSink for TtTreeSink<'a> {
-    fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
+    fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
         if kind == L_DOLLAR || kind == R_DOLLAR {
             self.cursor = self.cursor.bump_subtree();
             return;
         }
+        if kind == LIFETIME {
+            n_tokens = 2;
+        }
 
         let mut last = self.cursor;
         for _ in 0..n_tokens {
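
For context on the user-facing input all of this serves, here is an ordinary macro_rules! macro with a lifetime fragment specifier; it is plain Rust written for illustration and is not taken from the commit or its tests.

// Plain Rust, not from the commit: a macro_rules! macro using the `lifetime` fragment.
macro_rules! wrap_ref {
    ($lt:lifetime, $ty:ty) => { &$lt $ty };
}

// The macro expands in type position, so both occurrences become `&'a str`.
fn first_word<'a>(s: wrap_ref!('a, str)) -> wrap_ref!('a, str) {
    s.split_whitespace().next().unwrap_or("")
}

fn main() {
    println!("{}", first_word("split lifetime tokens"));
}

Matching $lt:lifetime here is exactly the path that goes through match_meta_var and expect_lifetime in the first file of the diff.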