1706: Normalize line endings to Unix flavor on IO boundaries r=matklad a=matklad



1708: update yanked getrandom r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] committed via GitHub on 2019-08-20 16:56:24 +00:00
commit bdf16d1b67
11 changed files with 231 additions and 238 deletions
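
The gist of #1706: file contents are normalized to "\n" when they enter the analyzer through the VFS, and edits are converted back to "\r\n" on the way out for clients whose files use DOS line endings (see the conv.rs, world.rs and test changes below). A minimal, self-contained sketch of that round trip; the LineEndings enum mirrors the one re-exported from ra_vfs, but the two helper functions here are illustrative assumptions, not the crate's actual API:

// Sketch of the normalize-on-input / restore-on-output idea (illustrative
// only; the real logic lives in ra_vfs and the LSP conversion layer below).

#[derive(Clone, Copy, PartialEq, Eq)]
pub enum LineEndings {
    Unix, // "\n"
    Dos,  // "\r\n"
}

// On the way in: store Unix-style text, remember what the file used.
pub fn normalize(input: &str) -> (String, LineEndings) {
    if input.contains("\r\n") {
        (input.replace("\r\n", "\n"), LineEndings::Dos)
    } else {
        (input.to_string(), LineEndings::Unix)
    }
}

// On the way out: restore the original flavor for edits sent to the client.
pub fn denormalize(text: &str, endings: LineEndings) -> String {
    match endings {
        LineEndings::Unix => text.to_string(),
        LineEndings::Dos => text.replace('\n', "\r\n"),
    }
}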

Cargo.lock (generated)

@@ -534,7 +534,7 @@ dependencies = [

 [[package]]
 name = "getrandom"
-version = "0.1.9"
+version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1092,7 +1092,7 @@ dependencies = [
  "ra_hir 0.1.0",
  "ra_ide_api 0.1.0",
  "ra_project_model 0.1.0",
- "ra_vfs 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ra_vfs 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_vfs_glob 0.1.0",
  "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1198,7 +1198,7 @@ dependencies = [
  "ra_project_model 0.1.0",
  "ra_syntax 0.1.0",
  "ra_text_edit 0.1.0",
- "ra_vfs 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ra_vfs 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_vfs_glob 0.1.0",
  "relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1256,7 +1256,7 @@ dependencies = [

 [[package]]
 name = "ra_rustc_lexer"
-version = "0.1.0-pre.2"
+version = "0.1.0-pre.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1268,7 +1268,7 @@ version = "0.1.0"
 dependencies = [
  "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_parser 0.1.0",
- "ra_rustc_lexer 0.1.0-pre.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ra_rustc_lexer 0.1.0-pre.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "ra_text_edit 0.1.0",
  "rowan 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "smol_str 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1307,7 +1307,7 @@ dependencies = [

 [[package]]
 name = "ra_vfs"
-version = "0.2.6"
+version = "0.2.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "crossbeam-channel 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1324,7 +1324,7 @@ name = "ra_vfs_glob"
 version = "0.1.0"
 dependencies = [
  "globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)",
- "ra_vfs 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "ra_vfs 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -1350,7 +1350,7 @@ name = "rand"
 version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "getrandom 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "getrandom 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1394,7 +1394,7 @@ name = "rand_core"
 version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "getrandom 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "getrandom 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
@@ -2073,7 +2073,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
 "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
 "checksum generic-array 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec"
-"checksum getrandom 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "2512b3191f22e2763a5db387f1c9409379772e2050841722eb4a8c4f497bf096"
+"checksum getrandom 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "6171a6cc63fbabbe27c2b5ee268e8b7fe5dc1eb0dd2dfad537c1dfed6f69117e"
 "checksum glob 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9b919933a397b79c37e33b77bb2aa3dc8eb6e165ad809e58ff75bc7db2e34574"
 "checksum globset 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)" = "925aa2cac82d8834e2b2a4415b6f6879757fb5c0928fc445ae76461a12eed8f2"
 "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
@@ -2134,8 +2134,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
 "checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
 "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
-"checksum ra_rustc_lexer 0.1.0-pre.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6baccda91574dfadd7f8a0bc8f9f110f874b6b484289b2536d3dbf4f0d5d97bb"
-"checksum ra_vfs 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "865bb9e0f71916f7c7527af4843a2a67d1b0789f7c91c512a6b4ded69af98249"
+"checksum ra_rustc_lexer 0.1.0-pre.3 (registry+https://github.com/rust-lang/crates.io-index)" = "04371af481820ff8d35c7d12b503eb09cf9e1bd246269bf4a33e3d8c54fa3a4a"
+"checksum ra_vfs 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6918c38f6ab45101f1ddd6110eda831a735b5e9ca6c96d1ceedb7d13ecaeb0f4"
 "checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
 "checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c"
 "checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"


@@ -56,10 +56,7 @@ pub(crate) fn introduce_variable(mut ctx: AssistCtx<impl HirDatabase>) -> Option
             // but we do not want to duplicate possible
             // extra newlines in the indent block
             let text = indent.text();
-            if text.starts_with("\r\n") {
-                buf.push_str("\r\n");
-                buf.push_str(text.trim_start_matches("\r\n"));
-            } else if text.starts_with('\n') {
+            if text.starts_with('\n') {
                 buf.push_str("\n");
                 buf.push_str(text.trim_start_matches('\n'));
             } else {


@@ -16,7 +16,7 @@ lsp-types = { version = "0.60.0", features = ["proposed"] }
 rustc-hash = "1.0"
 parking_lot = "0.9.0"
-ra_vfs = "0.2.0"
+ra_vfs = "0.2.7"
 thread_worker = { path = "../thread_worker" }
 ra_syntax = { path = "../ra_syntax" }
 ra_text_edit = { path = "../ra_text_edit" }


@@ -11,6 +11,7 @@ use ra_ide_api::{
 };
 use ra_syntax::{SyntaxKind, TextRange, TextUnit};
 use ra_text_edit::{AtomTextEdit, TextEdit};
+use ra_vfs::LineEndings;

 use crate::{req, world::WorldSnapshot, Result};
@@ -19,16 +20,14 @@ pub trait Conv {
     fn conv(self) -> Self::Output;
 }

-pub trait ConvWith {
-    type Ctx;
+pub trait ConvWith<CTX> {
     type Output;
-    fn conv_with(self, ctx: &Self::Ctx) -> Self::Output;
+    fn conv_with(self, ctx: CTX) -> Self::Output;
 }

-pub trait TryConvWith {
-    type Ctx;
+pub trait TryConvWith<CTX> {
     type Output;
-    fn try_conv_with(self, ctx: &Self::Ctx) -> Result<Self::Output>;
+    fn try_conv_with(self, ctx: CTX) -> Result<Self::Output>;
 }

 impl Conv for SyntaxKind {
@@ -89,11 +88,10 @@ impl Conv for Severity {
     }
 }

-impl ConvWith for CompletionItem {
-    type Ctx = LineIndex;
+impl ConvWith<(&LineIndex, LineEndings)> for CompletionItem {
     type Output = ::lsp_types::CompletionItem;

-    fn conv_with(self, ctx: &LineIndex) -> ::lsp_types::CompletionItem {
+    fn conv_with(self, ctx: (&LineIndex, LineEndings)) -> ::lsp_types::CompletionItem {
         let mut additional_text_edits = Vec::new();
         let mut text_edit = None;
         // LSP does not allow arbitrary edits in completion, so we have to do a
@@ -138,8 +136,7 @@ impl ConvWith for CompletionItem {
     }
 }

-impl ConvWith for Position {
-    type Ctx = LineIndex;
+impl ConvWith<&LineIndex> for Position {
     type Output = TextUnit;

     fn conv_with(self, line_index: &LineIndex) -> TextUnit {
@@ -148,8 +145,7 @@ impl ConvWith for Position {
     }
 }

-impl ConvWith for TextUnit {
-    type Ctx = LineIndex;
+impl ConvWith<&LineIndex> for TextUnit {
     type Output = Position;

     fn conv_with(self, line_index: &LineIndex) -> Position {
@@ -158,8 +154,7 @@ impl ConvWith for TextUnit {
     }
 }

-impl ConvWith for TextRange {
-    type Ctx = LineIndex;
+impl ConvWith<&LineIndex> for TextRange {
     type Output = Range;

     fn conv_with(self, line_index: &LineIndex) -> Range {
@@ -167,8 +162,7 @@ impl ConvWith for TextRange {
     }
 }

-impl ConvWith for Range {
-    type Ctx = LineIndex;
+impl ConvWith<&LineIndex> for Range {
     type Output = TextRange;

     fn conv_with(self, line_index: &LineIndex) -> TextRange {
@@ -208,77 +202,73 @@ impl Conv for ra_ide_api::FunctionSignature {
     }
 }

-impl ConvWith for TextEdit {
-    type Ctx = LineIndex;
+impl ConvWith<(&LineIndex, LineEndings)> for TextEdit {
     type Output = Vec<lsp_types::TextEdit>;

-    fn conv_with(self, line_index: &LineIndex) -> Vec<lsp_types::TextEdit> {
-        self.as_atoms().iter().map_conv_with(line_index).collect()
+    fn conv_with(self, ctx: (&LineIndex, LineEndings)) -> Vec<lsp_types::TextEdit> {
+        self.as_atoms().iter().map_conv_with(ctx).collect()
     }
 }

-impl<'a> ConvWith for &'a AtomTextEdit {
-    type Ctx = LineIndex;
+impl ConvWith<(&LineIndex, LineEndings)> for &AtomTextEdit {
     type Output = lsp_types::TextEdit;

-    fn conv_with(self, line_index: &LineIndex) -> lsp_types::TextEdit {
-        lsp_types::TextEdit {
-            range: self.delete.conv_with(line_index),
-            new_text: self.insert.clone(),
+    fn conv_with(
+        self,
+        (line_index, line_endings): (&LineIndex, LineEndings),
+    ) -> lsp_types::TextEdit {
+        let mut new_text = self.insert.clone();
+        if line_endings == LineEndings::Dos {
+            new_text = new_text.replace('\n', "\r\n");
         }
+        lsp_types::TextEdit { range: self.delete.conv_with(line_index), new_text }
     }
 }

-impl<T: ConvWith> ConvWith for Option<T> {
-    type Ctx = <T as ConvWith>::Ctx;
-    type Output = Option<<T as ConvWith>::Output>;
-    fn conv_with(self, ctx: &Self::Ctx) -> Self::Output {
+impl<T: ConvWith<CTX>, CTX> ConvWith<CTX> for Option<T> {
+    type Output = Option<T::Output>;
+    fn conv_with(self, ctx: CTX) -> Self::Output {
         self.map(|x| ConvWith::conv_with(x, ctx))
     }
 }

-impl<'a> TryConvWith for &'a Url {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for &Url {
     type Output = FileId;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
         world.uri_to_file_id(self)
     }
 }

-impl TryConvWith for FileId {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for FileId {
     type Output = Url;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<Url> {
         world.file_id_to_uri(self)
     }
 }

-impl<'a> TryConvWith for &'a TextDocumentItem {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for &TextDocumentItem {
     type Output = FileId;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
         self.uri.try_conv_with(world)
     }
 }

-impl<'a> TryConvWith for &'a VersionedTextDocumentIdentifier {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for &VersionedTextDocumentIdentifier {
     type Output = FileId;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
         self.uri.try_conv_with(world)
     }
 }

-impl<'a> TryConvWith for &'a TextDocumentIdentifier {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for &TextDocumentIdentifier {
     type Output = FileId;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> {
         world.uri_to_file_id(&self.uri)
     }
 }

-impl<'a> TryConvWith for &'a TextDocumentPositionParams {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for &TextDocumentPositionParams {
     type Output = FilePosition;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<FilePosition> {
         let file_id = self.text_document.try_conv_with(world)?;
@@ -288,8 +278,7 @@ impl<'a> TryConvWith for &'a TextDocumentPositionParams {
     }
 }

-impl<'a> TryConvWith for (&'a TextDocumentIdentifier, Range) {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for (&TextDocumentIdentifier, Range) {
     type Output = FileRange;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileRange> {
         let file_id = self.0.try_conv_with(world)?;
@@ -299,10 +288,9 @@ impl<'a> TryConvWith for (&'a TextDocumentIdentifier, Range) {
     }
 }

-impl<T: TryConvWith> TryConvWith for Vec<T> {
-    type Ctx = <T as TryConvWith>::Ctx;
-    type Output = Vec<<T as TryConvWith>::Output>;
-    fn try_conv_with(self, ctx: &Self::Ctx) -> Result<Self::Output> {
+impl<T: TryConvWith<CTX>, CTX: Copy> TryConvWith<CTX> for Vec<T> {
+    type Output = Vec<<T as TryConvWith<CTX>>::Output>;
+    fn try_conv_with(self, ctx: CTX) -> Result<Self::Output> {
         let mut res = Vec::with_capacity(self.len());
         for item in self {
             res.push(item.try_conv_with(ctx)?);
@@ -311,8 +299,7 @@ impl<T: TryConvWith> TryConvWith for Vec<T> {
     }
 }

-impl TryConvWith for SourceChange {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for SourceChange {
     type Output = req::SourceChange;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<req::SourceChange> {
         let cursor_position = match self.cursor_position {
@@ -351,8 +338,7 @@ impl TryConvWith for SourceChange {
     }
 }

-impl TryConvWith for SourceFileEdit {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for SourceFileEdit {
     type Output = TextDocumentEdit;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<TextDocumentEdit> {
         let text_document = VersionedTextDocumentIdentifier {
@@ -360,13 +346,14 @@ impl TryConvWith for SourceFileEdit {
             version: None,
         };
         let line_index = world.analysis().file_line_index(self.file_id)?;
-        let edits = self.edit.as_atoms().iter().map_conv_with(&line_index).collect();
+        let line_endings = world.file_line_endings(self.file_id);
+        let edits =
+            self.edit.as_atoms().iter().map_conv_with((&line_index, line_endings)).collect();
         Ok(TextDocumentEdit { text_document, edits })
     }
 }

-impl TryConvWith for FileSystemEdit {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for FileSystemEdit {
     type Output = ResourceOp;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<ResourceOp> {
         let res = match self {
@@ -384,8 +371,7 @@ impl TryConvWith for FileSystemEdit {
     }
 }

-impl TryConvWith for &NavigationTarget {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for &NavigationTarget {
     type Output = Location;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<Location> {
         let line_index = world.analysis().file_line_index(self.file_id())?;
@@ -394,8 +380,7 @@ impl TryConvWith for &NavigationTarget {
     }
 }

-impl TryConvWith for (FileId, RangeInfo<NavigationTarget>) {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for (FileId, RangeInfo<NavigationTarget>) {
     type Output = LocationLink;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<LocationLink> {
         let (src_file_id, target) = self;
@@ -422,8 +407,7 @@ impl TryConvWith for (FileId, RangeInfo<NavigationTarget>) {
     }
 }

-impl TryConvWith for (FileId, RangeInfo<Vec<NavigationTarget>>) {
-    type Ctx = WorldSnapshot;
+impl TryConvWith<&WorldSnapshot> for (FileId, RangeInfo<Vec<NavigationTarget>>) {
     type Output = req::GotoDefinitionResponse;
     fn try_conv_with(self, world: &WorldSnapshot) -> Result<req::GotoTypeDefinitionResponse> {
         let (file_id, RangeInfo { range, info: navs }) = self;
@@ -454,57 +438,55 @@ pub fn to_location(
     Ok(loc)
 }

-pub trait MapConvWith<'a>: Sized + 'a {
-    type Ctx;
+pub trait MapConvWith<CTX>: Sized {
     type Output;
-    fn map_conv_with(self, ctx: &'a Self::Ctx) -> ConvWithIter<'a, Self, Self::Ctx> {
+    fn map_conv_with(self, ctx: CTX) -> ConvWithIter<Self, CTX> {
         ConvWithIter { iter: self, ctx }
     }
 }

-impl<'a, I> MapConvWith<'a> for I
-where
-    I: Iterator + 'a,
-    I::Item: ConvWith,
-{
-    type Ctx = <I::Item as ConvWith>::Ctx;
-    type Output = <I::Item as ConvWith>::Output;
-}
-
-pub struct ConvWithIter<'a, I, Ctx: 'a> {
-    iter: I,
-    ctx: &'a Ctx,
-}
-
-impl<'a, I, Ctx> Iterator for ConvWithIter<'a, I, Ctx>
-where
-    I: Iterator,
-    I::Item: ConvWith<Ctx = Ctx>,
-{
-    type Item = <I::Item as ConvWith>::Output;
+impl<CTX, I> MapConvWith<CTX> for I
+where
+    I: Iterator,
+    I::Item: ConvWith<CTX>,
+{
+    type Output = <I::Item as ConvWith<CTX>>::Output;
+}
+
+pub struct ConvWithIter<I, CTX> {
+    iter: I,
+    ctx: CTX,
+}
+
+impl<I, CTX> Iterator for ConvWithIter<I, CTX>
+where
+    I: Iterator,
+    I::Item: ConvWith<CTX>,
+    CTX: Copy,
+{
+    type Item = <I::Item as ConvWith<CTX>>::Output;

     fn next(&mut self) -> Option<Self::Item> {
         self.iter.next().map(|item| item.conv_with(self.ctx))
     }
 }

-pub trait TryConvWithToVec<'a>: Sized + 'a {
-    type Ctx;
+pub trait TryConvWithToVec<CTX>: Sized {
     type Output;
-    fn try_conv_with_to_vec(self, ctx: &'a Self::Ctx) -> Result<Vec<Self::Output>>;
+    fn try_conv_with_to_vec(self, ctx: CTX) -> Result<Vec<Self::Output>>;
 }

-impl<'a, I> TryConvWithToVec<'a> for I
+impl<I, CTX> TryConvWithToVec<CTX> for I
 where
-    I: Iterator + 'a,
-    I::Item: TryConvWith,
+    I: Iterator,
+    I::Item: TryConvWith<CTX>,
+    CTX: Copy,
 {
-    type Ctx = <I::Item as TryConvWith>::Ctx;
-    type Output = <I::Item as TryConvWith>::Output;
-    fn try_conv_with_to_vec(self, ctx: &'a Self::Ctx) -> Result<Vec<Self::Output>> {
+    type Output = <I::Item as TryConvWith<CTX>>::Output;
+    fn try_conv_with_to_vec(self, ctx: CTX) -> Result<Vec<Self::Output>> {
         self.map(|it| it.try_conv_with(ctx)).collect()
     }
 }
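
Aside on the conversion-trait refactor above: moving the context from an associated type (type Ctx) to a generic parameter (ConvWith<CTX>) lets one type be converted under several different contexts, which is what allows completion items and text edits to take (&LineIndex, LineEndings) while positions and ranges keep taking &LineIndex. A toy, self-contained sketch of that pattern; the Offset and Flavor types are invented for illustration and are not part of rust-analyzer:

#[derive(Clone, Copy)]
enum Flavor {
    Plain,
    Verbose,
}

trait ConvWith<CTX> {
    type Output;
    fn conv_with(self, ctx: CTX) -> Self::Output;
}

struct Offset(usize);

// Context #1: just a line width.
impl ConvWith<usize> for Offset {
    type Output = (usize, usize);
    fn conv_with(self, width: usize) -> (usize, usize) {
        (self.0 / width, self.0 % width)
    }
}

// Context #2 for the same type. With the old `type Ctx` associated type,
// a second implementation like this would not have been possible.
impl ConvWith<(usize, Flavor)> for Offset {
    type Output = String;
    fn conv_with(self, (width, flavor): (usize, Flavor)) -> String {
        let (line, col) = (self.0 / width, self.0 % width);
        match flavor {
            Flavor::Plain => format!("{}:{}", line, col),
            Flavor::Verbose => format!("line {}, column {}", line, col),
        }
    }
}

fn main() {
    let pos: (usize, usize) = Offset(130).conv_with(80);
    let verbose: String = Offset(130).conv_with((80, Flavor::Verbose));
    println!("{:?} / {}", pos, verbose);
}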


@@ -138,6 +138,7 @@ pub fn handle_on_type_formatting(
     let _p = profile("handle_on_type_formatting");
     let mut position = params.text_document_position.try_conv_with(&world)?;
     let line_index = world.analysis().file_line_index(position.file_id)?;
+    let line_endings = world.file_line_endings(position.file_id);

     // in `ra_ide_api`, the `on_type` invariant is that
     // `text.char_at(position) == typed_char`.
@@ -156,7 +157,7 @@
     // This should be a single-file edit
     let edit = edit.source_file_edits.pop().unwrap();

-    let change: Vec<TextEdit> = edit.edit.conv_with(&line_index);
+    let change: Vec<TextEdit> = edit.edit.conv_with((&line_index, line_endings));
     Ok(Some(change))
 }
@@ -370,8 +371,9 @@ pub fn handle_completion(
         Some(items) => items,
     };
     let line_index = world.analysis().file_line_index(position.file_id)?;
+    let line_endings = world.file_line_endings(position.file_id);
     let items: Vec<CompletionItem> =
-        items.into_iter().map(|item| item.conv_with(&line_index)).collect();
+        items.into_iter().map(|item| item.conv_with((&line_index, line_endings))).collect();

     Ok(Some(items.into()))
 }


@@ -9,7 +9,7 @@ use parking_lot::RwLock;
 use ra_ide_api::{
     Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, LibraryData, SourceRootId,
 };
-use ra_vfs::{RootEntry, Vfs, VfsChange, VfsFile, VfsRoot};
+use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsRoot};
 use ra_vfs_glob::{Glob, RustPackageFilterBuilder};
 use relative_path::RelativePathBuf;
@@ -210,6 +210,10 @@ impl WorldSnapshot {
         Ok(url)
     }

+    pub fn file_line_endings(&self, id: FileId) -> LineEndings {
+        self.vfs.read().file_line_endings(VfsFile(id.0))
+    }
+
     pub fn path_to_uri(&self, root: SourceRootId, path: &RelativePathBuf) -> Result<Url> {
         let base = self.vfs.read().root2path(VfsRoot(root.0));
         let path = path.to_path(base);


@@ -208,7 +208,7 @@ pub use std::collections::HashMap;
     "range": {
         "end": {
             "character": 0,
-            "line": 6
+            "line": 7
         },
         "start": {
             "character": 0,
@@ -414,3 +414,49 @@ fn main() {{}}
     let elapsed = start.elapsed();
     assert!(elapsed.as_millis() < 2000, "typing enter took {:?}", elapsed);
 }
+
+#[test]
+fn preserves_dos_line_endings() {
+    let server = Project::with_fixture(
+        &"
+//- Cargo.toml
+[package]
+name = \"foo\"
+version = \"0.0.0\"
+//- src/main.rs
+/// Some Docs\r\nfn main() {}
+",
+    )
+    .server();
+
+    server.request::<OnEnter>(
+        TextDocumentPositionParams {
+            text_document: server.doc_id("src/main.rs"),
+            position: Position { line: 0, character: 8 },
+        },
+        json!({
+            "cursorPosition": {
+                "position": { "line": 1, "character": 4 },
+                "textDocument": { "uri": "file:///[..]src/main.rs" }
+            },
+            "label": "on enter",
+            "workspaceEdit": {
+                "documentChanges": [
+                    {
+                        "edits": [
+                            {
+                                "newText": "\r\n/// ",
+                                "range": {
+                                    "end": { "line": 0, "character": 8 },
+                                    "start": { "line": 0, "character": 8 }
+                                }
+                            }
+                        ],
+                        "textDocument": { "uri": "file:///[..]src/main.rs", "version": null }
+                    }
+                ]
+            }
+        }),
+    );
+}


@@ -10,7 +10,7 @@ repository = "https://github.com/rust-analyzer/rust-analyzer"
 [dependencies]
 itertools = "0.8.0"
 rowan = "0.6.1"
-ra_rustc_lexer = { version = "0.1.0-pre.2" }
+ra_rustc_lexer = { version = "0.1.0-pre.3", features = ["unicode-xid"] }

 # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here
 # to reduce number of compilations


@@ -12,16 +12,16 @@ pub struct Token {
     pub len: TextUnit,
 }

-fn match_literal_kind(kind: ra_rustc_lexer::LiteralKind) -> SyntaxKind {
+fn match_literal_kind(kind: rustc_lexer::LiteralKind) -> SyntaxKind {
     match kind {
-        ra_rustc_lexer::LiteralKind::Int { .. } => INT_NUMBER,
-        ra_rustc_lexer::LiteralKind::Float { .. } => FLOAT_NUMBER,
-        ra_rustc_lexer::LiteralKind::Char { .. } => CHAR,
-        ra_rustc_lexer::LiteralKind::Byte { .. } => BYTE,
-        ra_rustc_lexer::LiteralKind::Str { .. } => STRING,
-        ra_rustc_lexer::LiteralKind::ByteStr { .. } => BYTE_STRING,
-        ra_rustc_lexer::LiteralKind::RawStr { .. } => RAW_STRING,
-        ra_rustc_lexer::LiteralKind::RawByteStr { .. } => RAW_BYTE_STRING,
+        rustc_lexer::LiteralKind::Int { .. } => INT_NUMBER,
+        rustc_lexer::LiteralKind::Float { .. } => FLOAT_NUMBER,
+        rustc_lexer::LiteralKind::Char { .. } => CHAR,
+        rustc_lexer::LiteralKind::Byte { .. } => BYTE,
+        rustc_lexer::LiteralKind::Str { .. } => STRING,
+        rustc_lexer::LiteralKind::ByteStr { .. } => BYTE_STRING,
+        rustc_lexer::LiteralKind::RawStr { .. } => RAW_STRING,
+        rustc_lexer::LiteralKind::RawByteStr { .. } => RAW_BYTE_STRING,
     }
 }
@@ -32,32 +32,17 @@ pub fn tokenize(text: &str) -> Vec<Token> {
     }
     let mut text = text;
     let mut acc = Vec::new();
-    if let Some(len) = ra_rustc_lexer::strip_shebang(text) {
+    if let Some(len) = rustc_lexer::strip_shebang(text) {
         acc.push(Token { kind: SHEBANG, len: TextUnit::from_usize(len) });
         text = &text[len..];
     }
     while !text.is_empty() {
-        let rustc_token = ra_rustc_lexer::first_token(text);
-        macro_rules! decompose {
-            ($t1:expr, $t2:expr) => {{
-                acc.push(Token { kind: $t1, len: 1.into() });
-                acc.push(Token { kind: $t2, len: 1.into() });
-                text = &text[2..];
-                continue;
-            }};
-            ($t1:expr, $t2:expr, $t3:expr) => {{
-                acc.push(Token { kind: $t1, len: 1.into() });
-                acc.push(Token { kind: $t2, len: 1.into() });
-                acc.push(Token { kind: $t3, len: 1.into() });
-                text = &text[3..];
-                continue;
-            }};
-        }
+        let rustc_token = rustc_lexer::first_token(text);
         let kind = match rustc_token.kind {
-            ra_rustc_lexer::TokenKind::LineComment => COMMENT,
-            ra_rustc_lexer::TokenKind::BlockComment { .. } => COMMENT,
-            ra_rustc_lexer::TokenKind::Whitespace => WHITESPACE,
-            ra_rustc_lexer::TokenKind::Ident => {
+            rustc_lexer::TokenKind::LineComment => COMMENT,
+            rustc_lexer::TokenKind::BlockComment { .. } => COMMENT,
+            rustc_lexer::TokenKind::Whitespace => WHITESPACE,
+            rustc_lexer::TokenKind::Ident => {
                 let token_text = &text[..rustc_token.len];
                 if token_text == "_" {
                     UNDERSCORE
@@ -65,62 +50,37 @@ pub fn tokenize(text: &str) -> Vec<Token> {
                     SyntaxKind::from_keyword(&text[..rustc_token.len]).unwrap_or(IDENT)
                 }
             }
-            ra_rustc_lexer::TokenKind::RawIdent => IDENT,
-            ra_rustc_lexer::TokenKind::Literal { kind, .. } => match_literal_kind(kind),
-            ra_rustc_lexer::TokenKind::Lifetime { .. } => LIFETIME,
-            ra_rustc_lexer::TokenKind::Semi => SEMI,
-            ra_rustc_lexer::TokenKind::Comma => COMMA,
-            ra_rustc_lexer::TokenKind::DotDotDot => decompose!(DOT, DOT, DOT),
-            ra_rustc_lexer::TokenKind::DotDotEq => decompose!(DOT, DOT, EQ),
-            ra_rustc_lexer::TokenKind::DotDot => decompose!(DOT, DOT),
-            ra_rustc_lexer::TokenKind::Dot => DOT,
-            ra_rustc_lexer::TokenKind::OpenParen => L_PAREN,
-            ra_rustc_lexer::TokenKind::CloseParen => R_PAREN,
-            ra_rustc_lexer::TokenKind::OpenBrace => L_CURLY,
-            ra_rustc_lexer::TokenKind::CloseBrace => R_CURLY,
-            ra_rustc_lexer::TokenKind::OpenBracket => L_BRACK,
-            ra_rustc_lexer::TokenKind::CloseBracket => R_BRACK,
-            ra_rustc_lexer::TokenKind::At => AT,
-            ra_rustc_lexer::TokenKind::Pound => POUND,
-            ra_rustc_lexer::TokenKind::Tilde => TILDE,
-            ra_rustc_lexer::TokenKind::Question => QUESTION,
-            ra_rustc_lexer::TokenKind::ColonColon => decompose!(COLON, COLON),
-            ra_rustc_lexer::TokenKind::Colon => COLON,
-            ra_rustc_lexer::TokenKind::Dollar => DOLLAR,
-            ra_rustc_lexer::TokenKind::EqEq => decompose!(EQ, EQ),
-            ra_rustc_lexer::TokenKind::Eq => EQ,
-            ra_rustc_lexer::TokenKind::FatArrow => decompose!(EQ, R_ANGLE),
-            ra_rustc_lexer::TokenKind::Ne => decompose!(EXCL, EQ),
-            ra_rustc_lexer::TokenKind::Not => EXCL,
-            ra_rustc_lexer::TokenKind::Le => decompose!(L_ANGLE, EQ),
-            ra_rustc_lexer::TokenKind::LArrow => decompose!(COLON, MINUS),
-            ra_rustc_lexer::TokenKind::Lt => L_ANGLE,
-            ra_rustc_lexer::TokenKind::ShlEq => decompose!(L_ANGLE, L_ANGLE, EQ),
-            ra_rustc_lexer::TokenKind::Shl => decompose!(L_ANGLE, L_ANGLE),
-            ra_rustc_lexer::TokenKind::Ge => decompose!(R_ANGLE, EQ),
-            ra_rustc_lexer::TokenKind::Gt => R_ANGLE,
-            ra_rustc_lexer::TokenKind::ShrEq => decompose!(R_ANGLE, R_ANGLE, EQ),
-            ra_rustc_lexer::TokenKind::Shr => decompose!(R_ANGLE, R_ANGLE),
-            ra_rustc_lexer::TokenKind::RArrow => decompose!(MINUS, R_ANGLE),
-            ra_rustc_lexer::TokenKind::Minus => MINUS,
-            ra_rustc_lexer::TokenKind::MinusEq => decompose!(MINUS, EQ),
-            ra_rustc_lexer::TokenKind::And => AMP,
-            ra_rustc_lexer::TokenKind::AndAnd => decompose!(AMP, AMP),
-            ra_rustc_lexer::TokenKind::AndEq => decompose!(AMP, EQ),
-            ra_rustc_lexer::TokenKind::Or => PIPE,
-            ra_rustc_lexer::TokenKind::OrOr => decompose!(PIPE, PIPE),
-            ra_rustc_lexer::TokenKind::OrEq => decompose!(PIPE, EQ),
-            ra_rustc_lexer::TokenKind::PlusEq => decompose!(PLUS, EQ),
-            ra_rustc_lexer::TokenKind::Plus => PLUS,
-            ra_rustc_lexer::TokenKind::StarEq => decompose!(STAR, EQ),
-            ra_rustc_lexer::TokenKind::Star => STAR,
-            ra_rustc_lexer::TokenKind::SlashEq => decompose!(SLASH, EQ),
-            ra_rustc_lexer::TokenKind::Slash => SLASH,
-            ra_rustc_lexer::TokenKind::CaretEq => decompose!(CARET, EQ),
-            ra_rustc_lexer::TokenKind::Caret => CARET,
-            ra_rustc_lexer::TokenKind::PercentEq => decompose!(PERCENT, EQ),
-            ra_rustc_lexer::TokenKind::Percent => PERCENT,
-            ra_rustc_lexer::TokenKind::Unknown => ERROR,
+            rustc_lexer::TokenKind::RawIdent => IDENT,
+            rustc_lexer::TokenKind::Literal { kind, .. } => match_literal_kind(kind),
+            rustc_lexer::TokenKind::Lifetime { .. } => LIFETIME,
+            rustc_lexer::TokenKind::Semi => SEMI,
+            rustc_lexer::TokenKind::Comma => COMMA,
+            rustc_lexer::TokenKind::Dot => DOT,
+            rustc_lexer::TokenKind::OpenParen => L_PAREN,
+            rustc_lexer::TokenKind::CloseParen => R_PAREN,
+            rustc_lexer::TokenKind::OpenBrace => L_CURLY,
+            rustc_lexer::TokenKind::CloseBrace => R_CURLY,
+            rustc_lexer::TokenKind::OpenBracket => L_BRACK,
+            rustc_lexer::TokenKind::CloseBracket => R_BRACK,
+            rustc_lexer::TokenKind::At => AT,
+            rustc_lexer::TokenKind::Pound => POUND,
+            rustc_lexer::TokenKind::Tilde => TILDE,
+            rustc_lexer::TokenKind::Question => QUESTION,
+            rustc_lexer::TokenKind::Colon => COLON,
+            rustc_lexer::TokenKind::Dollar => DOLLAR,
+            rustc_lexer::TokenKind::Eq => EQ,
+            rustc_lexer::TokenKind::Not => EXCL,
+            rustc_lexer::TokenKind::Lt => L_ANGLE,
+            rustc_lexer::TokenKind::Gt => R_ANGLE,
+            rustc_lexer::TokenKind::Minus => MINUS,
+            rustc_lexer::TokenKind::And => AMP,
+            rustc_lexer::TokenKind::Or => PIPE,
+            rustc_lexer::TokenKind::Plus => PLUS,
+            rustc_lexer::TokenKind::Star => STAR,
+            rustc_lexer::TokenKind::Slash => SLASH,
+            rustc_lexer::TokenKind::Caret => CARET,
+            rustc_lexer::TokenKind::Percent => PERCENT,
+            rustc_lexer::TokenKind::Unknown => ERROR,
         };
         let token = Token { kind, len: TextUnit::from_usize(rustc_token.len) };
         acc.push(token);
@@ -130,12 +90,12 @@ pub fn tokenize(text: &str) -> Vec<Token> {
 }

 pub fn classify_literal(text: &str) -> Option<Token> {
-    let t = ra_rustc_lexer::first_token(text);
+    let t = rustc_lexer::first_token(text);
     if t.len != text.len() {
         return None;
     }
     let kind = match t.kind {
-        ra_rustc_lexer::TokenKind::Literal { kind, .. } => match_literal_kind(kind),
+        rustc_lexer::TokenKind::Literal { kind, .. } => match_literal_kind(kind),
         _ => return None,
     };
     Some(Token { kind, len: TextUnit::from_usize(t.len) })


@@ -1,6 +1,6 @@
 mod block;

-use ra_rustc_lexer::unescape;
+use rustc_lexer::unescape;

 use crate::{
     algo::visit::{visitor_ctx, VisitorCtx},
@@ -32,64 +32,62 @@ pub enum EscapeError {
     NonAsciiCharInByte,
 }

-impl From<ra_rustc_lexer::unescape::EscapeError> for EscapeError {
-    fn from(err: ra_rustc_lexer::unescape::EscapeError) -> Self {
+impl From<rustc_lexer::unescape::EscapeError> for EscapeError {
+    fn from(err: rustc_lexer::unescape::EscapeError) -> Self {
         match err {
-            ra_rustc_lexer::unescape::EscapeError::ZeroChars => EscapeError::ZeroChars,
-            ra_rustc_lexer::unescape::EscapeError::MoreThanOneChar => EscapeError::MoreThanOneChar,
-            ra_rustc_lexer::unescape::EscapeError::LoneSlash => EscapeError::LoneSlash,
-            ra_rustc_lexer::unescape::EscapeError::InvalidEscape => EscapeError::InvalidEscape,
-            ra_rustc_lexer::unescape::EscapeError::BareCarriageReturn
-            | ra_rustc_lexer::unescape::EscapeError::BareCarriageReturnInRawString => {
+            rustc_lexer::unescape::EscapeError::ZeroChars => EscapeError::ZeroChars,
+            rustc_lexer::unescape::EscapeError::MoreThanOneChar => EscapeError::MoreThanOneChar,
+            rustc_lexer::unescape::EscapeError::LoneSlash => EscapeError::LoneSlash,
+            rustc_lexer::unescape::EscapeError::InvalidEscape => EscapeError::InvalidEscape,
+            rustc_lexer::unescape::EscapeError::BareCarriageReturn
+            | rustc_lexer::unescape::EscapeError::BareCarriageReturnInRawString => {
                 EscapeError::BareCarriageReturn
             }
-            ra_rustc_lexer::unescape::EscapeError::EscapeOnlyChar => EscapeError::EscapeOnlyChar,
-            ra_rustc_lexer::unescape::EscapeError::TooShortHexEscape => {
-                EscapeError::TooShortHexEscape
-            }
-            ra_rustc_lexer::unescape::EscapeError::InvalidCharInHexEscape => {
+            rustc_lexer::unescape::EscapeError::EscapeOnlyChar => EscapeError::EscapeOnlyChar,
+            rustc_lexer::unescape::EscapeError::TooShortHexEscape => EscapeError::TooShortHexEscape,
+            rustc_lexer::unescape::EscapeError::InvalidCharInHexEscape => {
                 EscapeError::InvalidCharInHexEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::OutOfRangeHexEscape => {
+            rustc_lexer::unescape::EscapeError::OutOfRangeHexEscape => {
                 EscapeError::OutOfRangeHexEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::NoBraceInUnicodeEscape => {
+            rustc_lexer::unescape::EscapeError::NoBraceInUnicodeEscape => {
                 EscapeError::NoBraceInUnicodeEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::InvalidCharInUnicodeEscape => {
+            rustc_lexer::unescape::EscapeError::InvalidCharInUnicodeEscape => {
                 EscapeError::InvalidCharInUnicodeEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::EmptyUnicodeEscape => {
+            rustc_lexer::unescape::EscapeError::EmptyUnicodeEscape => {
                 EscapeError::EmptyUnicodeEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::UnclosedUnicodeEscape => {
+            rustc_lexer::unescape::EscapeError::UnclosedUnicodeEscape => {
                 EscapeError::UnclosedUnicodeEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::LeadingUnderscoreUnicodeEscape => {
+            rustc_lexer::unescape::EscapeError::LeadingUnderscoreUnicodeEscape => {
                 EscapeError::LeadingUnderscoreUnicodeEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::OverlongUnicodeEscape => {
+            rustc_lexer::unescape::EscapeError::OverlongUnicodeEscape => {
                 EscapeError::OverlongUnicodeEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::LoneSurrogateUnicodeEscape => {
+            rustc_lexer::unescape::EscapeError::LoneSurrogateUnicodeEscape => {
                 EscapeError::LoneSurrogateUnicodeEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::OutOfRangeUnicodeEscape => {
+            rustc_lexer::unescape::EscapeError::OutOfRangeUnicodeEscape => {
                 EscapeError::OutOfRangeUnicodeEscape
             }
-            ra_rustc_lexer::unescape::EscapeError::UnicodeEscapeInByte => {
+            rustc_lexer::unescape::EscapeError::UnicodeEscapeInByte => {
                 EscapeError::UnicodeEscapeInByte
             }
-            ra_rustc_lexer::unescape::EscapeError::NonAsciiCharInByte
-            | ra_rustc_lexer::unescape::EscapeError::NonAsciiCharInByteString => {
+            rustc_lexer::unescape::EscapeError::NonAsciiCharInByte
+            | rustc_lexer::unescape::EscapeError::NonAsciiCharInByteString => {
                 EscapeError::NonAsciiCharInByte
             }
         }
     }
 }

-impl From<ra_rustc_lexer::unescape::EscapeError> for SyntaxErrorKind {
-    fn from(err: ra_rustc_lexer::unescape::EscapeError) -> Self {
+impl From<rustc_lexer::unescape::EscapeError> for SyntaxErrorKind {
+    fn from(err: rustc_lexer::unescape::EscapeError) -> Self {
         SyntaxErrorKind::EscapeError(err.into())
     }
 }


@@ -134,21 +134,25 @@ pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> {
             }
         };
     };

     let margin = fixture
         .lines()
         .filter(|it| it.trim_start().starts_with("//-"))
         .map(|it| it.len() - it.trim_start().len())
         .next()
         .expect("empty fixture");
-    let lines = fixture.lines().filter_map(|line| {
-        if line.len() >= margin {
-            assert!(line[..margin].trim().is_empty());
-            Some(&line[margin..])
-        } else {
-            assert!(line.trim().is_empty());
-            None
-        }
-    });
+
+    let lines = fixture
+        .split('\n') // don't use `.lines` to not drop `\r\n`
+        .filter_map(|line| {
+            if line.len() >= margin {
+                assert!(line[..margin].trim().is_empty());
+                Some(&line[margin..])
+            } else {
+                assert!(line.trim().is_empty());
+                None
+            }
+        });

     for line in lines {
         if line.starts_with("//-") {
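
Why the fixture parser above switched from .lines() to .split('\n'): str::lines() strips a trailing '\r', so splitting a CRLF fixture with it would silently discard the very line endings the preserves_dos_line_endings test relies on. A quick standalone check of that standard-library behavior:

fn main() {
    let text = "a\r\nb";
    let via_lines: Vec<&str> = text.lines().collect();
    let via_split: Vec<&str> = text.split('\n').collect();
    assert_eq!(via_lines, ["a", "b"]); // the '\r' is gone
    assert_eq!(via_split, ["a\r", "b"]); // the '\r' survives
    println!("{:?} vs {:?}", via_lines, via_split);
}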