Kill RAW_ literals

Syntactically, they are indistinguishable from non-raw versions, so it
doesn't make sense to separate them *at the syntax* level.
This commit is contained in:
Aleksey Kladov 2020-11-06 22:21:56 +01:00
parent 6725dcf847
commit 5ba4f949c2
53 changed files with 142 additions and 165 deletions

View file

@ -12,7 +12,7 @@ use ide_db::{
};
use syntax::{
algo::{self, find_node_at_offset, SyntaxRewriter},
AstNode, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize,
AstNode, AstToken, SourceFile, SyntaxElement, SyntaxKind, SyntaxToken, TextRange, TextSize,
TokenAtOffset,
};
use text_edit::{TextEdit, TextEditBuilder};
@ -81,9 +81,12 @@ impl<'a> AssistContext<'a> {
pub(crate) fn token_at_offset(&self) -> TokenAtOffset<SyntaxToken> {
self.source_file.syntax().token_at_offset(self.offset())
}
pub(crate) fn find_token_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
pub(crate) fn find_token_syntax_at_offset(&self, kind: SyntaxKind) -> Option<SyntaxToken> {
self.token_at_offset().find(|it| it.kind() == kind)
}
pub(crate) fn find_token_at_offset<T: AstToken>(&self) -> Option<T> {
self.token_at_offset().find_map(T::cast)
}
pub(crate) fn find_node_at_offset<N: AstNode>(&self) -> Option<N> {
find_node_at_offset(self.source_file.syntax(), self.offset())
}

View file

@ -25,7 +25,7 @@ use crate::{
// }
// ```
pub(crate) fn add_turbo_fish(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let ident = ctx.find_token_at_offset(SyntaxKind::IDENT).or_else(|| {
let ident = ctx.find_token_syntax_at_offset(SyntaxKind::IDENT).or_else(|| {
let arg_list = ctx.find_node_at_offset::<ast::ArgList>()?;
if arg_list.args().count() > 0 {
return None;

View file

@ -41,7 +41,7 @@ use crate::{
// fn qux(bar: Bar, baz: Baz) {}
// ```
pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let star = ctx.find_token_at_offset(T![*])?;
let star = ctx.find_token_syntax_at_offset(T![*])?;
let (parent, mod_path) = find_parent_and_path(&star)?;
let target_module = match ctx.sema.resolve_path(&mod_path)? {
PathResolution::Def(ModuleDef::Module(it)) => it,

View file

@ -18,7 +18,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// }
// ```
pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let comma = ctx.find_token_at_offset(T![,])?;
let comma = ctx.find_token_syntax_at_offset(T![,])?;
let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?;
let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?;

View file

@ -20,7 +20,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
pub(crate) fn flip_trait_bound(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
// We want to replicate the behavior of `flip_binexpr` by only suggesting
// the assist when the cursor is on a `+`
let plus = ctx.find_token_at_offset(T![+])?;
let plus = ctx.find_token_syntax_at_offset(T![+])?;
// Make sure we're in a `TypeBoundList`
if ast::TypeBoundList::cast(plus.parent()).is_none() {

View file

@ -36,7 +36,7 @@ static ASSIST_LABEL: &str = "Introduce named lifetime";
// FIXME: should also add support for the case fun(f: &Foo) -> &<|>Foo
pub(crate) fn introduce_named_lifetime(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let lifetime_token = ctx
.find_token_at_offset(SyntaxKind::LIFETIME)
.find_token_syntax_at_offset(SyntaxKind::LIFETIME)
.filter(|lifetime| lifetime.text() == "'_")?;
if let Some(fn_def) = lifetime_token.ancestors().find_map(ast::Fn::cast) {
generate_fn_def_assist(acc, &fn_def, lifetime_token.text_range())

View file

@ -29,7 +29,7 @@ use crate::{
// ```
pub(crate) fn invert_if(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let if_keyword = ctx.find_token_at_offset(T![if])?;
let if_keyword = ctx.find_token_syntax_at_offset(T![if])?;
let expr = ast::IfExpr::cast(if_keyword.parent())?;
let if_range = if_keyword.text_range();
let cursor_in_range = if_range.contains_range(ctx.frange.range);

View file

@ -2,9 +2,7 @@ use std::borrow::Cow;
use syntax::{
ast::{self, HasQuotes, HasStringValue},
AstToken,
SyntaxKind::{RAW_STRING, STRING},
TextRange, TextSize,
AstToken, TextRange, TextSize,
};
use test_utils::mark;
@ -26,7 +24,10 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// }
// ```
pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?;
let token = ctx.find_token_at_offset::<ast::String>()?;
if token.is_raw() {
return None;
}
let value = token.value()?;
let target = token.syntax().text_range();
acc.add(
@ -65,7 +66,10 @@ pub(crate) fn make_raw_string(acc: &mut Assists, ctx: &AssistContext) -> Option<
// }
// ```
pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?;
let token = ctx.find_token_at_offset::<ast::String>()?;
if !token.is_raw() {
return None;
}
let value = token.value()?;
let target = token.syntax().text_range();
acc.add(
@ -104,11 +108,15 @@ pub(crate) fn make_usual_string(acc: &mut Assists, ctx: &AssistContext) -> Optio
// }
// ```
pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let token = ctx.find_token_at_offset(RAW_STRING)?;
let target = token.text_range();
let token = ctx.find_token_at_offset::<ast::String>()?;
if !token.is_raw() {
return None;
}
let text_range = token.syntax().text_range();
let target = text_range;
acc.add(AssistId("add_hash", AssistKind::Refactor), "Add #", target, |edit| {
edit.insert(token.text_range().start() + TextSize::of('r'), "#");
edit.insert(token.text_range().end(), "#");
edit.insert(text_range.start() + TextSize::of('r'), "#");
edit.insert(text_range.end(), "#");
})
}
@ -128,7 +136,10 @@ pub(crate) fn add_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
// }
// ```
pub(crate) fn remove_hash(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let token = ctx.find_token_at_offset(RAW_STRING).and_then(ast::RawString::cast)?;
let token = ctx.find_token_at_offset::<ast::String>()?;
if !token.is_raw() {
return None;
}
let text = token.text().as_str();
if !text.starts_with("r#") && text.ends_with('#') {

View file

@ -18,7 +18,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// }
// ```
pub(crate) fn remove_mut(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let mut_token = ctx.find_token_at_offset(T![mut])?;
let mut_token = ctx.find_token_syntax_at_offset(T![mut])?;
let delete_from = mut_token.text_range().start();
let delete_to = match mut_token.next_token() {
Some(it) if it.kind() == SyntaxKind::WHITESPACE => it.text_range().end(),

View file

@ -37,7 +37,7 @@ use ide_db::ty_filter::TryEnum;
// fn compute() -> Option<i32> { None }
// ```
pub(crate) fn replace_let_with_if_let(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let let_kw = ctx.find_token_at_offset(T![let])?;
let let_kw = ctx.find_token_syntax_at_offset(T![let])?;
let let_stmt = let_kw.ancestors().find_map(ast::LetStmt::cast)?;
let init = let_stmt.initializer()?;
let original_pat = let_stmt.pat()?;

View file

@ -22,7 +22,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// }
// ```
pub(crate) fn replace_string_with_char(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let token = ctx.find_token_at_offset(STRING).and_then(ast::String::cast)?;
let token = ctx.find_token_syntax_at_offset(STRING).and_then(ast::String::cast)?;
let value = token.value()?;
let target = token.syntax().text_range();

View file

@ -16,7 +16,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// use std::{collections::HashMap};
// ```
pub(crate) fn split_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
let colon_colon = ctx.find_token_at_offset(T![::])?;
let colon_colon = ctx.find_token_syntax_at_offset(T![::])?;
let path = ast::Path::cast(colon_colon.parent())?.qualifier()?;
let top_path = successors(Some(path.clone()), |it| it.parent_path()).last()?;

View file

@ -29,7 +29,7 @@ pub(crate) fn unwrap_block(acc: &mut Assists, ctx: &AssistContext) -> Option<()>
let assist_id = AssistId("unwrap_block", AssistKind::RefactorRewrite);
let assist_label = "Unwrap block";
let l_curly_token = ctx.find_token_at_offset(T!['{'])?;
let l_curly_token = ctx.find_token_syntax_at_offset(T!['{'])?;
let mut block = ast::BlockExpr::cast(l_curly_token.parent())?;
let mut parent = block.syntax().parent()?;
if ast::MatchArm::can_cast(parent.kind()) {

View file

@ -35,7 +35,7 @@ fn try_extend_selection(
) -> Option<TextRange> {
let range = frange.range;
let string_kinds = [COMMENT, STRING, RAW_STRING, BYTE_STRING, RAW_BYTE_STRING];
let string_kinds = [COMMENT, STRING, BYTE_STRING];
let list_kinds = [
RECORD_PAT_FIELD_LIST,
MATCH_ARM_LIST,

View file

@ -179,10 +179,12 @@ pub(crate) fn highlight(
element.clone()
};
if let Some(token) = element.as_token().cloned().and_then(ast::RawString::cast) {
let expanded = element_to_highlight.as_token().unwrap().clone();
if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() {
continue;
if let Some(token) = element.as_token().cloned().and_then(ast::String::cast) {
if token.is_raw() {
let expanded = element_to_highlight.as_token().unwrap().clone();
if injection::highlight_injection(&mut stack, &sema, token, expanded).is_some() {
continue;
}
}
}
@ -214,10 +216,6 @@ pub(crate) fn highlight(
}
stack.pop_and_inject(None);
}
} else if let Some(string) =
element_to_highlight.as_token().cloned().and_then(ast::RawString::cast)
{
format_string_highlighter.highlight_format_string(&mut stack, &string, range);
}
}
}
@ -532,7 +530,7 @@ fn highlight_element(
None => h.into(),
}
}
STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(),
STRING | BYTE_STRING => HighlightTag::StringLiteral.into(),
ATTR => HighlightTag::Attribute.into(),
INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(),
BYTE => HighlightTag::ByteLiteral.into(),

View file

@ -29,9 +29,7 @@ impl FormatStringHighlighter {
.children_with_tokens()
.filter(|t| t.kind() != SyntaxKind::WHITESPACE)
.nth(1)
.filter(|e| {
ast::String::can_cast(e.kind()) || ast::RawString::can_cast(e.kind())
})
.filter(|e| ast::String::can_cast(e.kind()))
}
_ => {}
}

View file

@ -15,7 +15,7 @@ use super::HighlightedRangeStack;
pub(super) fn highlight_injection(
acc: &mut HighlightedRangeStack,
sema: &Semantics<RootDatabase>,
literal: ast::RawString,
literal: ast::String,
expanded: SyntaxToken,
) -> Option<()> {
let active_parameter = ActiveParameter::at_token(&sema, expanded)?;

View file

@ -1,9 +1,7 @@
use ide_db::base_db::{FileId, SourceDatabase};
use ide_db::RootDatabase;
use syntax::{
algo, AstNode, NodeOrToken, SourceFile,
SyntaxKind::{RAW_STRING, STRING},
SyntaxToken, TextRange, TextSize,
algo, AstNode, NodeOrToken, SourceFile, SyntaxKind::STRING, SyntaxToken, TextRange, TextSize,
};
// Feature: Show Syntax Tree
@ -46,7 +44,7 @@ fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<
// we'll attempt parsing it as rust syntax
// to provide the syntax tree of the contents of the string
match token.kind() {
STRING | RAW_STRING => syntax_tree_for_token(token, text_range),
STRING => syntax_tree_for_token(token, text_range),
_ => None,
}
}

View file

@ -236,10 +236,7 @@ fn abi(p: &mut Parser) {
assert!(p.at(T![extern]));
let abi = p.start();
p.bump(T![extern]);
match p.current() {
STRING | RAW_STRING => p.bump_any(),
_ => (),
}
p.eat(STRING);
abi.complete(p, ABI);
}

View file

@ -15,18 +15,8 @@ use super::*;
// let _ = b"e";
// let _ = br"f";
// }
pub(crate) const LITERAL_FIRST: TokenSet = TokenSet::new(&[
TRUE_KW,
FALSE_KW,
INT_NUMBER,
FLOAT_NUMBER,
BYTE,
CHAR,
STRING,
RAW_STRING,
BYTE_STRING,
RAW_BYTE_STRING,
]);
pub(crate) const LITERAL_FIRST: TokenSet =
TokenSet::new(&[TRUE_KW, FALSE_KW, INT_NUMBER, FLOAT_NUMBER, BYTE, CHAR, STRING, BYTE_STRING]);
pub(crate) fn literal(p: &mut Parser) -> Option<CompletedMarker> {
if !p.at_ts(LITERAL_FIRST) {

View file

@ -239,9 +239,7 @@ fn items_without_modifiers(p: &mut Parser, m: Marker) -> Result<(), Marker> {
T![static] => consts::static_(p, m),
// test extern_block
// extern {}
T![extern]
if la == T!['{'] || ((la == STRING || la == RAW_STRING) && p.nth(2) == T!['{']) =>
{
T![extern] if la == T!['{'] || (la == STRING && p.nth(2) == T!['{']) => {
abi(p);
extern_item_list(p);
m.complete(p, EXTERN_BLOCK);

View file

@ -111,9 +111,7 @@ pub enum SyntaxKind {
CHAR,
BYTE,
STRING,
RAW_STRING,
BYTE_STRING,
RAW_BYTE_STRING,
ERROR,
IDENT,
WHITESPACE,
@ -277,8 +275,7 @@ impl SyntaxKind {
}
pub fn is_literal(self) -> bool {
match self {
INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | RAW_STRING | BYTE_STRING
| RAW_BYTE_STRING => true,
INT_NUMBER | FLOAT_NUMBER | CHAR | BYTE | STRING | BYTE_STRING => true,
_ => false,
}
}

View file

@ -320,6 +320,13 @@ impl ast::Literal {
ast::IntNumber::cast(self.token())
}
pub fn as_string(&self) -> Option<ast::String> {
ast::String::cast(self.token())
}
pub fn as_byte_string(&self) -> Option<ast::ByteString> {
ast::ByteString::cast(self.token())
}
fn find_suffix(text: &str, possible_suffixes: &[&str]) -> Option<SmolStr> {
possible_suffixes
.iter()
@ -351,10 +358,10 @@ impl ast::Literal {
suffix: Self::find_suffix(&text, &ast::FloatNumber::SUFFIXES),
}
}
STRING | RAW_STRING => LiteralKind::String,
STRING => LiteralKind::String,
T![true] => LiteralKind::Bool(true),
T![false] => LiteralKind::Bool(false),
BYTE_STRING | RAW_BYTE_STRING => LiteralKind::ByteString,
BYTE_STRING => LiteralKind::ByteString,
CHAR => LiteralKind::Char,
BYTE => LiteralKind::Byte,
_ => unreachable!(),

View file

@ -70,16 +70,16 @@ impl AstToken for String {
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct RawString {
pub struct ByteString {
pub(crate) syntax: SyntaxToken,
}
impl std::fmt::Display for RawString {
impl std::fmt::Display for ByteString {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(&self.syntax, f)
}
}
impl AstToken for RawString {
fn can_cast(kind: SyntaxKind) -> bool { kind == RAW_STRING }
impl AstToken for ByteString {
fn can_cast(kind: SyntaxKind) -> bool { kind == BYTE_STRING }
fn cast(syntax: SyntaxToken) -> Option<Self> {
if Self::can_cast(syntax.kind()) {
Some(Self { syntax })

View file

@ -55,13 +55,7 @@ impl ast::Attr {
let key = self.simple_name()?;
let value_token = lit.syntax().first_token()?;
let value: SmolStr = if let Some(s) = ast::String::cast(value_token.clone()) {
s.value()?.into()
} else if let Some(s) = ast::RawString::cast(value_token) {
s.value()?.into()
} else {
return None;
};
let value: SmolStr = ast::String::cast(value_token.clone())?.value()?.into();
Some((key, value))
}

View file

@ -139,14 +139,31 @@ pub trait HasQuotes: AstToken {
}
impl HasQuotes for ast::String {}
impl HasQuotes for ast::RawString {}
pub trait HasStringValue: HasQuotes {
fn value(&self) -> Option<Cow<'_, str>>;
}
impl ast::String {
pub fn is_raw(&self) -> bool {
self.text().starts_with('r')
}
pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
let contents_range = self.text_range_between_quotes()?;
assert!(TextRange::up_to(contents_range.len()).contains_range(range));
Some(range + contents_range.start())
}
}
impl HasStringValue for ast::String {
fn value(&self) -> Option<Cow<'_, str>> {
if self.is_raw() {
let text = self.text().as_str();
let text =
&text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
return Some(Cow::Borrowed(text));
}
let text = self.text().as_str();
let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
@ -166,20 +183,9 @@ impl HasStringValue for ast::String {
}
}
// FIXME: merge `ast::RawString` and `ast::String`.
impl HasStringValue for ast::RawString {
fn value(&self) -> Option<Cow<'_, str>> {
let text = self.text().as_str();
let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
Some(Cow::Borrowed(text))
}
}
impl ast::RawString {
pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
let contents_range = self.text_range_between_quotes()?;
assert!(TextRange::up_to(contents_range.len()).contains_range(range));
Some(range + contents_range.start())
impl ast::ByteString {
pub fn is_raw(&self) -> bool {
self.text().starts_with("br")
}
}
@ -522,22 +528,6 @@ impl HasFormatSpecifier for ast::String {
}
}
impl HasFormatSpecifier for ast::RawString {
fn char_ranges(
&self,
) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
let text = self.text().as_str();
let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
let mut res = Vec::with_capacity(text.len());
for (idx, c) in text.char_indices() {
res.push((TextRange::at(idx.try_into().unwrap(), TextSize::of(c)) + offset, Ok(c)));
}
Some(res)
}
}
impl ast::IntNumber {
#[rustfmt::skip]
pub(crate) const SUFFIXES: &'static [&'static str] = &[

View file

@ -235,7 +235,7 @@ fn rustc_token_kind_to_syntax_kind(
RawStrError::TooManyDelimiters { .. } => "Too many `#` symbols: raw strings may be delimited by up to 65535 `#` symbols",
};
};
RAW_STRING
STRING
}
rustc_lexer::LiteralKind::RawByteStr { err: raw_str_err, .. } => {
if let Some(raw_str_err) = raw_str_err {
@ -250,7 +250,7 @@ fn rustc_token_kind_to_syntax_kind(
};
};
RAW_BYTE_STRING
BYTE_STRING
}
};

View file

@ -44,7 +44,7 @@ fn reparse_token<'node>(
let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone();
let prev_token_kind = prev_token.kind();
match prev_token_kind {
WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
WHITESPACE | COMMENT | IDENT | STRING => {
if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT {
// removing a new line may extends previous token
let deleted_range = edit.delete - prev_token.text_range().start();

View file

@ -4,7 +4,7 @@ mod block;
use crate::{
algo, ast, match_ast, AstNode, SyntaxError,
SyntaxKind::{BYTE, BYTE_STRING, CHAR, CONST, FN, INT_NUMBER, STRING, TYPE_ALIAS},
SyntaxKind::{BYTE, CHAR, CONST, FN, INT_NUMBER, TYPE_ALIAS},
SyntaxNode, SyntaxToken, TextSize, T,
};
use rowan::Direction;
@ -121,6 +121,29 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
acc.push(SyntaxError::new_at_offset(rustc_unescape_error_to_string(err), off));
};
if let Some(s) = literal.as_string() {
if !s.is_raw() {
if let Some(without_quotes) = unquote(text, 1, '"') {
unescape_literal(without_quotes, Mode::Str, &mut |range, char| {
if let Err(err) = char {
push_err(1, (range.start, err));
}
})
}
}
}
if let Some(s) = literal.as_byte_string() {
if !s.is_raw() {
if let Some(without_quotes) = unquote(text, 2, '"') {
unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| {
if let Err(err) = char {
push_err(2, (range.start, err));
}
})
}
}
}
match token.kind() {
BYTE => {
if let Some(Err(e)) = unquote(text, 2, '\'').map(unescape_byte) {
@ -132,24 +155,6 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
push_err(1, e);
}
}
BYTE_STRING => {
if let Some(without_quotes) = unquote(text, 2, '"') {
unescape_byte_literal(without_quotes, Mode::ByteStr, &mut |range, char| {
if let Err(err) = char {
push_err(2, (range.start, err));
}
})
}
}
STRING => {
if let Some(without_quotes) = unquote(text, 1, '"') {
unescape_literal(without_quotes, Mode::Str, &mut |range, char| {
if let Err(err) = char {
push_err(1, (range.start, err));
}
})
}
}
_ => (),
}
}

View file

@ -1,2 +1,2 @@
RAW_STRING 4 "r##\""
STRING 4 "r##\""
> error0..4 token("r##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)

View file

@ -1,2 +1,2 @@
RAW_STRING 8 "r##\"🦀"
STRING 8 "r##\"🦀"
> error0..8 token("r##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)

View file

@ -1,2 +1,2 @@
RAW_STRING 8 "r##\"\\x7f"
STRING 8 "r##\"\\x7f"
> error0..8 token("r##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)

View file

@ -1,2 +1,2 @@
RAW_STRING 12 "r##\"\\u{20AA}"
STRING 12 "r##\"\\u{20AA}"
> error0..12 token("r##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)

View file

@ -1,2 +1,2 @@
RAW_STRING 5 "r##\" "
STRING 5 "r##\" "
> error0..5 token("r##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)

View file

@ -1,2 +1,2 @@
RAW_STRING 5 "r##\"\\"
STRING 5 "r##\"\\"
> error0..5 token("r##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)

View file

@ -1,2 +1,2 @@
RAW_STRING 6 "r##\"\\n"
STRING 6 "r##\"\\n"
> error0..6 token("r##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal)

View file

@ -1,2 +1,2 @@
RAW_BYTE_STRING 5 "br##\""
BYTE_STRING 5 "br##\""
> error0..5 token("br##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)

View file

@ -1,2 +1,2 @@
RAW_BYTE_STRING 9 "br##\"🦀"
BYTE_STRING 9 "br##\"🦀"
> error0..9 token("br##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)

View file

@ -1,2 +1,2 @@
RAW_BYTE_STRING 9 "br##\"\\x7f"
BYTE_STRING 9 "br##\"\\x7f"
> error0..9 token("br##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)

View file

@ -1,2 +1,2 @@
RAW_BYTE_STRING 13 "br##\"\\u{20AA}"
BYTE_STRING 13 "br##\"\\u{20AA}"
> error0..13 token("br##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)

View file

@ -1,2 +1,2 @@
RAW_BYTE_STRING 6 "br##\" "
BYTE_STRING 6 "br##\" "
> error0..6 token("br##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)

View file

@ -1,2 +1,2 @@
RAW_BYTE_STRING 6 "br##\"\\"
BYTE_STRING 6 "br##\"\\"
> error0..6 token("br##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)

View file

@ -1,2 +1,2 @@
RAW_BYTE_STRING 7 "br##\"\\n"
BYTE_STRING 7 "br##\"\\n"
> error0..7 token("br##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal)

View file

@ -1,2 +1,2 @@
RAW_STRING 3 "r##"
STRING 3 "r##"
> error0..3 token("r##") msg(Missing `"` symbol after `#` symbols to begin the raw string literal)

View file

@ -1,2 +1,2 @@
RAW_BYTE_STRING 4 "br##"
BYTE_STRING 4 "br##"
> error0..4 token("br##") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal)

View file

@ -1,4 +1,4 @@
RAW_STRING 4 "r## "
STRING 4 "r## "
IDENT 1 "I"
WHITESPACE 1 " "
IDENT 4 "lack"

View file

@ -1,4 +1,4 @@
RAW_BYTE_STRING 5 "br## "
BYTE_STRING 5 "br## "
IDENT 1 "I"
WHITESPACE 1 " "
IDENT 4 "lack"

View file

@ -4,13 +4,13 @@ BYTE 4 "b\'x\'"
WHITESPACE 1 " "
BYTE_STRING 6 "b\"foo\""
WHITESPACE 1 " "
RAW_BYTE_STRING 4 "br\"\""
BYTE_STRING 4 "br\"\""
WHITESPACE 1 "\n"
BYTE 6 "b\'\'suf"
WHITESPACE 1 " "
BYTE_STRING 5 "b\"\"ix"
WHITESPACE 1 " "
RAW_BYTE_STRING 6 "br\"\"br"
BYTE_STRING 6 "br\"\"br"
WHITESPACE 1 "\n"
BYTE 5 "b\'\\n\'"
WHITESPACE 1 " "

View file

@ -1,6 +1,6 @@
STRING 7 "\"hello\""
WHITESPACE 1 " "
RAW_STRING 8 "r\"world\""
STRING 8 "r\"world\""
WHITESPACE 1 " "
STRING 17 "\"\\n\\\"\\\\no escape\""
WHITESPACE 1 " "

View file

@ -1,2 +1,2 @@
RAW_STRING 36 "r###\"this is a r##\"raw\"## string\"###"
STRING 36 "r###\"this is a r##\"raw\"## string\"###"
WHITESPACE 1 "\n"

View file

@ -104,7 +104,7 @@ SOURCE_FILE@0..189
EQ@142..143 "="
WHITESPACE@143..144 " "
LITERAL@144..148
RAW_STRING@144..148 "r\"d\""
STRING@144..148 "r\"d\""
SEMICOLON@148..149 ";"
WHITESPACE@149..154 "\n "
LET_STMT@154..167
@ -128,7 +128,7 @@ SOURCE_FILE@0..189
EQ@178..179 "="
WHITESPACE@179..180 " "
LITERAL@180..185
RAW_BYTE_STRING@180..185 "br\"f\""
BYTE_STRING@180..185 "br\"f\""
SEMICOLON@185..186 ";"
WHITESPACE@186..187 "\n"
R_CURLY@187..188 "}"

View file

@ -71,16 +71,7 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc {
"trait", "true", "try", "type", "unsafe", "use", "where", "while",
],
contextual_keywords: &["auto", "default", "existential", "union", "raw"],
literals: &[
"INT_NUMBER",
"FLOAT_NUMBER",
"CHAR",
"BYTE",
"STRING",
"RAW_STRING",
"BYTE_STRING",
"RAW_BYTE_STRING",
],
literals: &["INT_NUMBER", "FLOAT_NUMBER", "CHAR", "BYTE", "STRING", "BYTE_STRING"],
tokens: &[
"ERROR",
"IDENT",

View file

@ -505,7 +505,7 @@ impl Field {
fn lower(grammar: &Grammar) -> AstSrc {
let mut res = AstSrc::default();
res.tokens = "Whitespace Comment String RawString IntNumber FloatNumber"
res.tokens = "Whitespace Comment String ByteString IntNumber FloatNumber"
.split_ascii_whitespace()
.map(|it| it.to_string())
.collect::<Vec<_>>();