mirror of https://github.com/rust-lang/rust-analyzer (synced 2024-12-25 20:43:21 +00:00)
Merge branch 'master' of https://github.com/rust-analyzer/rust-analyzer into feature/themes
commit 4bdb6351ac
25 changed files with 370 additions and 429 deletions
51  .github/workflows/ci.yaml  (vendored)

@@ -1,16 +1,23 @@
-name: Continuous integration
-on: [pull_request, push]
+name: CI
+on:
+  pull_request:
+  push:
+    branches:
+    - master
+    - staging

 jobs:
-  rust-tests:
-    name: Rust tests
+  rust:
+    name: Rust
     runs-on: ubuntu-latest
     env:
       RUSTFLAGS: -D warnings
       CARGO_INCREMENTAL: 0
     steps:
+
     - name: Checkout repository
       uses: actions/checkout@v1
+
     - name: Install Rust toolchain
       uses: actions-rs/toolchain@v1
       with:
@@ -18,31 +25,32 @@ jobs:
         profile: minimal
         override: true
         components: rustfmt, rust-src
     - name: Generate lockfile
       uses: actions-rs/cargo@v1
       with:
         command: generate-lockfile

     - name: Cargo target cache
       uses: actions/cache@v1
       with:
         path: target
         key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}

     - name: Compile
       uses: actions-rs/cargo@v1
       with:
         command: test
         args: --no-run

     - name: Test
       uses: actions-rs/cargo@v1
       with:
         command: test

     - name: Prepare build directory for cache
       run: |
         find ./target/debug -maxdepth 1 -type f -delete && \
         rm -fr ./target/debug/{deps,.fingerprint}/{*ra_*,*heavy_test*,*gen_lsp*,*thread_worker*} && \
         rm -f ./target/.rustc_info.json
-  vscode-tests:
-    name: VS Code tests
+
+  type-script:
+    name: TypeScript
     runs-on: ubuntu-latest
     env:
       CXX: g++-4.9
@@ -50,25 +58,12 @@ jobs:
     steps:
     - name: Checkout repository
       uses: actions/checkout@v1
-    - name: Install Rust toolchain
-      uses: actions-rs/toolchain@v1
-      with:
-        toolchain: stable
-        profile: minimal
-        override: true
-    - name: Generate lockfile
-      uses: actions-rs/cargo@v1
-      with:
-        command: generate-lockfile
-    - name: Cargo target cache
-      uses: actions/cache@v1
-      with:
-        path: target
-        key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }}
+
     - name: Install Nodejs
       uses: actions/setup-node@v1
       with:
         node-version: 12.x
+
     - name: Install xvfb
       run: sudo apt-get install xvfb
     - run: npm ci
@@ -77,10 +72,6 @@ jobs:
       working-directory: ./editors/code
     - run: xvfb-run --auto-servernum npm run travis
       working-directory: ./editors/code
+
     - name: Cleanup xvfb
       uses: bcomnes/cleanup-xvfb@v1
-    - name: Prepare build directory for cache
-      run: |
-        find ./target/debug -maxdepth 1 -type f -delete && \
-        rm -fr ./target/debug/{deps,.fingerprint}/{*ra_*,*heavy_test*,*gen_lsp*,*thread_worker*} && \
-        rm -f ./target/.rustc_info.json

39  .github/workflows/rustdoc.yaml  (vendored, new file)

@@ -0,0 +1,39 @@
+name: rustdoc
+on:
+  push:
+    branches:
+    - master
+
+jobs:
+  rustdoc:
+    runs-on: ubuntu-latest
+    env:
+      RUSTFLAGS: -D warnings
+      CARGO_INCREMENTAL: 0
+
+    steps:
+    - name: Checkout repository
+      uses: actions/checkout@v1
+
+    - name: Install Rust toolchain
+      uses: actions-rs/toolchain@v1
+      with:
+        toolchain: stable
+        profile: minimal
+        override: true
+        components: rustfmt, rust-src
+
+    - name: Build Documentation
+      uses: actions-rs/cargo@v1
+      with:
+        command: doc
+        args: --all --no-deps
+
+    - name: Deploy Docs
+      uses: peaceiris/actions-gh-pages@v2.6.0-rc0
+      env:
+        ACTIONS_DEPLOY_KEY: ${{ secrets.ACTIONS_DEPLOY_KEY }}
+        PUBLISH_BRANCH: gh-pages
+        PUBLISH_DIR: ./target/doc
+      with:
+        forceOrphan: true

64  .travis.yml  (deleted)

@@ -1,64 +0,0 @@
-cache: cargo
-before_cache:
-    - find ./target/debug -maxdepth 1 -type f -delete
-    - rm -fr ./target/debug/{deps,.fingerprint}/{*ra_*,*heavy_test*,*gen_lsp*,*thread_worker*}
-    - rm -f ./target/.rustc_info.json
-
-matrix:
-    include:
-        - name: "Rust Tests"
-          os: linux
-          dist: xenial
-          language: rust
-          rust: stable
-          script:
-              - rustup component add rustfmt
-              - rustup component add rust-src
-              - cargo test --no-run # let's measure compile time separately
-              - cargo test
-          env:
-              - RUSTFLAGS="-D warnings", CARGO_INCREMENTAL=0
-
-        - name: "Rust Docs"
-          os: linux
-          if: branch = master AND type = push
-          before_script:
-              - DEPLOY_DOCS=1
-          language: rust
-          rust: stable
-          script:
-              - cargo doc --all --no-deps
-          env:
-              - RUSTFLAGS="-D warnings", CARGO_INCREMENTAL=0
-
-        - name: "VS Code Tests"
-          os: linux
-          language: node_js
-          dist: xenial
-          node_js: node
-          services:
-              - xvfb
-          before_install: cd editors/code
-          install:
-              - npm ci
-              - npm run vscode:prepublish
-          script:
-              - npm run travis
-          env:
-              - CXX="g++-4.9", CC="gcc-4.9"
-
-branches:
-    only:
-        - staging
-        - master
-        - trying
-
-deploy:
-    provider: pages
-    skip-cleanup: true
-    github-token: $DOCS_TOKEN # Set in the settings page of your repository, as a secure variable
-    keep-history: false
-    local-dir: target/doc
-    on:
-        branch: master
-        condition: $DEPLOY_DOCS = 1

README.md

@@ -1,7 +1,5 @@
 # Rust Analyzer

-[![Build Status](https://travis-ci.org/rust-analyzer/rust-analyzer.svg?branch=master)](https://travis-ci.org/rust-analyzer/rust-analyzer)
-
 Rust Analyzer is an **experimental** modular compiler frontend for the Rust
 language. It is a part of a larger rls-2.0 effort to create excellent IDE
 support for Rust. If you want to get involved, check the rls-2.0 working group

@@ -58,9 +56,7 @@ https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frls-2.2E0

 ## Quick Links

-* Work List: https://paper.dropbox.com/doc/RLS-2.0-work-list--AZ3BgHKKCtqszbsi3gi6sjchAQ-42vbnxzuKq2lKwW0mkn8Y
 * API docs: https://rust-analyzer.github.io/rust-analyzer/ra_ide_api/
-* CI: https://travis-ci.org/rust-analyzer/rust-analyzer

 ## License

bors.toml

@@ -1,4 +1,2 @@
-status = [
-  "continuous-integration/travis-ci/push",
-]
+status = ["Rust", "TypeScript"]
 delete_merged_branches = true

@@ -59,10 +59,8 @@ pub trait AstDatabase: SourceDatabase {
     fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId;
     fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
     fn macro_def(&self, id: MacroDefId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>>;
-    fn parse_macro(
-        &self,
-        macro_file: MacroFile,
-    ) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)>;
+    fn parse_macro(&self, macro_file: MacroFile)
+        -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
     fn macro_expand(&self, macro_call: MacroCallId) -> Result<Arc<tt::Subtree>, String>;
 }

@@ -136,7 +134,7 @@ pub(crate) fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Optio
 pub(crate) fn parse_macro(
     db: &dyn AstDatabase,
     macro_file: MacroFile,
-) -> Option<(Parse<SyntaxNode>, Arc<mbe::RevTokenMap>)> {
+) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
     let _p = profile("parse_macro_query");

     let macro_call_id = macro_file.macro_call_id;

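The query now hands back the expansion's parse tree together with the same TokenMap type used on the input side. A minimal sketch of a consumer, assuming a `db: &dyn AstDatabase` and a `macro_file: MacroFile` are already in hand (local names are illustrative, not from this commit):

    // Sketch: the (tree, map) pair a caller gets back from the query.
    if let Some((parse, exp_map)) = db.parse_macro(macro_file) {
        // `parse.syntax_node()` is the expanded tree; `exp_map` maps each
        // tt::TokenId to its relative range *within* that expanded tree.
        let expanded = parse.syntax_node();
        let _ = (expanded, exp_map);
    }
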
@@ -20,7 +20,7 @@ use ra_db::{salsa, CrateId, FileId};
 use ra_syntax::{
     algo,
     ast::{self, AstNode},
-    SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    SyntaxNode, SyntaxToken, TextUnit,
 };

 use crate::ast_id_map::FileAstId;

@@ -79,22 +79,17 @@ impl HirFileId {
             HirFileIdRepr::MacroFile(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);

-                let arg_start = loc.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
-                let def_start =
-                    loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
+                let arg_tt = loc.ast_id.to_node(db).token_tree()?;
+                let def_tt = loc.def.ast_id.to_node(db).token_tree()?;

                 let macro_def = db.macro_def(loc.def)?;
                 let (parse, exp_map) = db.parse_macro(macro_file)?;
-                let expanded = Source::new(self, parse.syntax_node());
                 let macro_arg = db.macro_arg(macro_file.macro_call_id)?;

-                let arg_start = (loc.ast_id.file_id, arg_start);
-                let def_start = (loc.def.ast_id.file_id, def_start);
-
                 Some(ExpansionInfo {
-                    expanded,
-                    arg_start,
-                    def_start,
+                    expanded: Source::new(self, parse.syntax_node()),
+                    arg: Source::new(loc.ast_id.file_id, arg_tt),
+                    def: Source::new(loc.ast_id.file_id, def_tt),
                     macro_arg,
                     macro_def,
                     exp_map,

@@ -159,18 +154,19 @@ impl MacroCallId {
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
     expanded: Source<SyntaxNode>,
-    arg_start: (HirFileId, TextUnit),
-    def_start: (HirFileId, TextUnit),
+    arg: Source<ast::TokenTree>,
+    def: Source<ast::TokenTree>,

     macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
-    exp_map: Arc<mbe::RevTokenMap>,
+    exp_map: Arc<mbe::TokenMap>,
 }

 impl ExpansionInfo {
     pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
-        assert_eq!(token.file_id, self.arg_start.0);
-        let range = token.ast.text_range().checked_sub(self.arg_start.1)?;
+        assert_eq!(token.file_id, self.arg.file_id);
+        let range =
+            token.ast.text_range().checked_sub(self.arg.ast.syntax().text_range().start())?;
         let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.0.map_id_down(token_id);

@@ -181,25 +177,22 @@ impl ExpansionInfo {
         Some(self.expanded.with_ast(token))
     }

-    // FIXME: a more correct signature would be
-    // `pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>>`
-    pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
-        let token_id = look_in_rev_map(&self.exp_map, from)?;
+    pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
+        let token_id = self.exp_map.token_by_range(token.ast.text_range())?;

         let (token_id, origin) = self.macro_def.0.map_id_up(token_id);

-        let (token_map, (file_id, start_offset)) = match origin {
-            mbe::Origin::Call => (&self.macro_arg.1, self.arg_start),
-            mbe::Origin::Def => (&self.macro_def.1, self.def_start),
+        let (token_map, tt) = match origin {
+            mbe::Origin::Call => (&self.macro_arg.1, &self.arg),
+            mbe::Origin::Def => (&self.macro_def.1, &self.def),
         };

-        let range = token_map.relative_range_of(token_id)?;
-
-        return Some((file_id, range + start_offset));
-
-        fn look_in_rev_map(exp_map: &mbe::RevTokenMap, from: TextRange) -> Option<tt::TokenId> {
-            exp_map.ranges.iter().find(|&it| it.0.is_subrange(&from)).map(|it| it.1)
-        }
+        let range = token_map.range_by_token(token_id)?;
+        let token = algo::find_covering_element(
+            tt.ast.syntax(),
+            range + tt.ast.syntax().text_range().start(),
+        )
+        .into_token()?;
+        Some(tt.with_ast(token))
     }
 }

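After this hunk, `map_token_down` and `map_token_up` are inverses over the same three maps (`macro_arg.1`, `macro_def.0`, `exp_map`). A hedged sketch of the round trip, assuming an `expansion: &ExpansionInfo` and a `token: Source<SyntaxToken>` sitting inside the macro call's token tree (the helper function is invented for illustration):

    fn round_trip(
        expansion: &ExpansionInfo,
        token: Source<SyntaxToken>,
    ) -> Option<Source<SyntaxToken>> {
        // Down: a token of the macro call maps to a token of the expansion.
        let in_expansion = expansion.map_token_down(token.as_ref())?;
        // Up: a token of the expansion maps back to the call or definition site.
        expansion.map_token_up(in_expansion.as_ref())
    }
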
@@ -1,16 +1,17 @@
 //! FIXME: write short doc here

-use hir::{AssocItem, Either, FieldSource, HasSource, ModuleSource};
+use hir::{AssocItem, Either, FieldSource, HasSource, ModuleSource, Source};
 use ra_db::{FileId, SourceDatabase};
 use ra_syntax::{
     ast::{self, DocCommentsOwner, NameOwner},
     match_ast, AstNode, SmolStr,
     SyntaxKind::{self, BIND_PAT},
-    SyntaxNode, TextRange,
+    TextRange,
 };

+use crate::{db::RootDatabase, expand::original_range, FileSymbol};
+
 use super::short_label::ShortLabel;
-use crate::{db::RootDatabase, FileSymbol};

 /// `NavigationTarget` represents and element in the editor's UI which you can
 /// click on to navigate to a particular piece of code.

@@ -79,13 +80,13 @@ impl NavigationTarget {
     pub(crate) fn from_module_to_decl(db: &RootDatabase, module: hir::Module) -> NavigationTarget {
         let name = module.name(db).map(|it| it.to_string().into()).unwrap_or_default();
         if let Some(src) = module.declaration_source(db) {
-            let (file_id, text_range) = find_range_from_node(db, src.file_id, src.ast.syntax());
+            let frange = original_range(db, src.as_ref().map(|it| it.syntax()));
             return NavigationTarget::from_syntax(
-                file_id,
+                frange.file_id,
                 name,
                 None,
-                text_range,
-                src.ast.syntax(),
+                frange.range,
+                src.ast.syntax().kind(),
                 src.ast.doc_comment_text(),
                 src.ast.short_label(),
             );

@@ -140,22 +141,22 @@ impl NavigationTarget {
     /// Allows `NavigationTarget` to be created from a `NameOwner`
     pub(crate) fn from_named(
         db: &RootDatabase,
-        file_id: hir::HirFileId,
-        node: &impl ast::NameOwner,
+        node: Source<&dyn ast::NameOwner>,
         docs: Option<String>,
         description: Option<String>,
     ) -> NavigationTarget {
         //FIXME: use `_` instead of empty string
-        let name = node.name().map(|it| it.text().clone()).unwrap_or_default();
-        let focus_range = node.name().map(|it| find_range_from_node(db, file_id, it.syntax()).1);
-        let (file_id, full_range) = find_range_from_node(db, file_id, node.syntax());
+        let name = node.ast.name().map(|it| it.text().clone()).unwrap_or_default();
+        let focus_range =
+            node.ast.name().map(|it| original_range(db, node.with_ast(it.syntax())).range);
+        let frange = original_range(db, node.map(|it| it.syntax()));

         NavigationTarget::from_syntax(
-            file_id,
+            frange.file_id,
             name,
             focus_range,
-            full_range,
-            node.syntax(),
+            frange.range,
+            node.ast.syntax().kind(),
             docs,
             description,
         )

@@ -166,14 +167,14 @@ impl NavigationTarget {
         name: SmolStr,
         focus_range: Option<TextRange>,
         full_range: TextRange,
-        node: &SyntaxNode,
+        kind: SyntaxKind,
         docs: Option<String>,
         description: Option<String>,
     ) -> NavigationTarget {
         NavigationTarget {
             file_id,
             name,
-            kind: node.kind(),
+            kind,
             full_range,
             focus_range,
             container_name: None,

@@ -218,8 +219,7 @@ where
         let src = self.source(db);
         NavigationTarget::from_named(
             db,
-            src.file_id,
-            &src.ast,
+            src.as_ref().map(|it| it as &dyn ast::NameOwner),
             src.ast.doc_comment_text(),
             src.ast.short_label(),
         )

@@ -230,29 +230,29 @@ impl ToNav for hir::Module {
     fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
         let src = self.definition_source(db);
         let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default();
-        match src.ast {
+        match &src.ast {
             ModuleSource::SourceFile(node) => {
-                let (file_id, text_range) = find_range_from_node(db, src.file_id, node.syntax());
+                let frange = original_range(db, src.with_ast(node.syntax()));

                 NavigationTarget::from_syntax(
-                    file_id,
+                    frange.file_id,
                     name,
                     None,
-                    text_range,
-                    node.syntax(),
+                    frange.range,
+                    node.syntax().kind(),
                     None,
                     None,
                 )
             }
             ModuleSource::Module(node) => {
-                let (file_id, text_range) = find_range_from_node(db, src.file_id, node.syntax());
+                let frange = original_range(db, src.with_ast(node.syntax()));

                 NavigationTarget::from_syntax(
-                    file_id,
+                    frange.file_id,
                     name,
                     None,
-                    text_range,
-                    node.syntax(),
+                    frange.range,
+                    node.syntax().kind(),
                     node.doc_comment_text(),
                     node.short_label(),
                 )

@@ -264,14 +264,14 @@ impl ToNav for hir::Module {
 impl ToNav for hir::ImplBlock {
     fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
         let src = self.source(db);
-        let (file_id, text_range) = find_range_from_node(db, src.file_id, src.ast.syntax());
+        let frange = original_range(db, src.as_ref().map(|it| it.syntax()));

         NavigationTarget::from_syntax(
-            file_id,
+            frange.file_id,
             "impl".into(),
             None,
-            text_range,
-            src.ast.syntax(),
+            frange.range,
+            src.ast.syntax().kind(),
             None,
             None,
         )

@@ -282,22 +282,21 @@ impl ToNav for hir::StructField {
     fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
         let src = self.source(db);

-        match src.ast {
+        match &src.ast {
             FieldSource::Named(it) => NavigationTarget::from_named(
                 db,
-                src.file_id,
-                &it,
+                src.with_ast(it),
                 it.doc_comment_text(),
                 it.short_label(),
             ),
             FieldSource::Pos(it) => {
-                let (file_id, text_range) = find_range_from_node(db, src.file_id, it.syntax());
+                let frange = original_range(db, src.with_ast(it.syntax()));
                 NavigationTarget::from_syntax(
-                    file_id,
+                    frange.file_id,
                     "".into(),
                     None,
-                    text_range,
-                    it.syntax(),
+                    frange.range,
+                    it.syntax().kind(),
                     None,
                     None,
                 )

@@ -310,7 +309,12 @@ impl ToNav for hir::MacroDef {
     fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
         let src = self.source(db);
         log::debug!("nav target {:#?}", src.ast.syntax());
-        NavigationTarget::from_named(db, src.file_id, &src.ast, src.ast.doc_comment_text(), None)
+        NavigationTarget::from_named(
+            db,
+            src.as_ref().map(|it| it as &dyn ast::NameOwner),
+            src.ast.doc_comment_text(),
+            None,
+        )
     }
 }

@@ -360,21 +364,6 @@ impl ToNav for hir::Local {
     }
 }

-fn find_range_from_node(
-    db: &RootDatabase,
-    src: hir::HirFileId,
-    node: &SyntaxNode,
-) -> (FileId, TextRange) {
-    let text_range = node.text_range();
-    let (file_id, text_range) = src
-        .expansion_info(db)
-        .and_then(|expansion_info| expansion_info.find_range(text_range))
-        .unwrap_or((src, text_range));
-
-    // FIXME: handle recursive macro generated macro
-    (file_id.original_file(db), text_range)
-}
-
 pub(crate) fn docs_from_symbol(db: &RootDatabase, symbol: &FileSymbol) -> Option<String> {
     let parse = db.parse(symbol.file_id);
     let node = symbol.ptr.to_node(parse.tree().syntax());

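The common thread in these navigation_target changes is the `Source<T>` pair — a `HirFileId` plus a syntax payload — replacing separately threaded `file_id` arguments. A sketch of the combinators the new code leans on (`as_ref`, `with_ast`, `map` all appear in the hunks above), under the assumption that `Source` at this point in history looks roughly like this; field and method bodies are reconstructed, not quoted from the diff:

    #[derive(Clone, Copy)]
    pub struct HirFileId(u32); // stand-in for the real id type

    pub struct Source<T> {
        pub file_id: HirFileId,
        pub ast: T,
    }

    impl<T> Source<T> {
        // Borrow the payload while keeping the file id.
        pub fn as_ref(&self) -> Source<&T> {
            Source { file_id: self.file_id, ast: &self.ast }
        }
        // Pair the same file id with a different payload.
        pub fn with_ast<U>(&self, ast: U) -> Source<U> {
            Source { file_id: self.file_id, ast }
        }
        // Transform the payload, keeping the file id.
        pub fn map<U>(self, f: impl FnOnce(T) -> U) -> Source<U> {
            Source { file_id: self.file_id, ast: f(self.ast) }
        }
    }
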
59  crates/ra_ide_api/src/expand.rs  (new file)

@@ -0,0 +1,59 @@
+//! Utilities to work with files, produced by macros.
+use std::iter::successors;
+
+use hir::Source;
+use ra_db::FileId;
+use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken};
+
+use crate::{db::RootDatabase, FileRange};
+
+pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> FileRange {
+    let expansion = match node.file_id.expansion_info(db) {
+        None => {
+            return FileRange {
+                file_id: node.file_id.original_file(db),
+                range: node.ast.text_range(),
+            }
+        }
+        Some(it) => it,
+    };
+    // FIXME: the following is completely wrong.
+    //
+    // *First*, we should try to map first and last tokens of node, and, if that
+    // fails, return the range of the overall macro expansions.
+    //
+    // *Second*, we should handle recursive macro expansions
+
+    let token = node
+        .ast
+        .descendants_with_tokens()
+        .filter_map(|it| it.into_token())
+        .find_map(|it| expansion.map_token_up(node.with_ast(&it)));
+
+    match token {
+        Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.ast.text_range() },
+        None => FileRange { file_id: node.file_id.original_file(db), range: node.ast.text_range() },
+    }
+}
+
+pub(crate) fn descend_into_macros(
+    db: &RootDatabase,
+    file_id: FileId,
+    token: SyntaxToken,
+) -> Source<SyntaxToken> {
+    let src = Source::new(file_id.into(), token);
+
+    successors(Some(src), |token| {
+        let macro_call = token.ast.ancestors().find_map(ast::MacroCall::cast)?;
+        let tt = macro_call.token_tree()?;
+        if !token.ast.text_range().is_subrange(&tt.syntax().text_range()) {
+            return None;
+        }
+        let source_analyzer =
+            hir::SourceAnalyzer::new(db, token.with_ast(token.ast.parent()).as_ref(), None);
+        let exp = source_analyzer.expand(db, &macro_call)?;
+        exp.map_token_down(db, token.as_ref())
+    })
+    .last()
+    .unwrap()
+}

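The two helpers in this new module are designed to compose: a feature first descends from the user's token into the innermost expansion, does its work there, and finally maps any resulting node back to a range in a real file. A sketch of that flow (the wrapper function is invented; `db`, `file_id`, and `token` are assumed as in goto_definition below):

    fn feature_on_token(db: &RootDatabase, file_id: FileId, token: SyntaxToken) -> FileRange {
        // Step into macro expansions while the token is part of a macro argument.
        let token = descend_into_macros(db, file_id, token);
        // The node we land on may live in a synthetic macro file; translate it
        // back to a file id and range the editor can actually display.
        let node = token.ast.parent();
        original_range(db, token.with_ast(&node))
    }
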
@@ -1,16 +1,15 @@
 //! FIXME: write short doc here

-use std::iter::successors;
-
 use hir::{db::AstDatabase, Source};
 use ra_syntax::{
     ast::{self, DocCommentsOwner},
-    match_ast, AstNode, SyntaxNode, SyntaxToken,
+    match_ast, AstNode, SyntaxNode,
 };

 use crate::{
     db::RootDatabase,
     display::{ShortLabel, ToNav},
+    expand::descend_into_macros,
     references::{classify_name_ref, NameKind::*},
     FilePosition, NavigationTarget, RangeInfo,
 };

@@ -19,7 +18,9 @@ pub(crate) fn goto_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let token = descend_into_macros(db, position)?;
+    let file = db.parse_or_expand(position.file_id.into())?;
+    let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?;
+    let token = descend_into_macros(db, position.file_id, token);

     let res = match_ast! {
         match (token.ast.parent()) {

@@ -39,24 +40,6 @@ pub(crate) fn goto_definition(
     Some(res)
 }

-fn descend_into_macros(db: &RootDatabase, position: FilePosition) -> Option<Source<SyntaxToken>> {
-    let file = db.parse_or_expand(position.file_id.into())?;
-    let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?;
-
-    successors(Some(Source::new(position.file_id.into(), token)), |token| {
-        let macro_call = token.ast.ancestors().find_map(ast::MacroCall::cast)?;
-        let tt = macro_call.token_tree()?;
-        if !token.ast.text_range().is_subrange(&tt.syntax().text_range()) {
-            return None;
-        }
-        let source_analyzer =
-            hir::SourceAnalyzer::new(db, token.with_ast(token.ast.parent()).as_ref(), None);
-        let exp = source_analyzer.expand(db, &macro_call)?;
-        exp.map_token_down(db, token.as_ref())
-    })
-    .last()
-}
-
 #[derive(Debug)]
 pub(crate) enum ReferenceResult {
     Exact(NavigationTarget),

@@ -137,8 +120,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::StructDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -146,8 +128,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::EnumDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -155,8 +136,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::EnumVariant(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -164,8 +144,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::FnDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -173,8 +152,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::TypeAliasDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -182,8 +160,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::ConstDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -191,8 +168,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::StaticDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -200,8 +176,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::TraitDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -209,8 +184,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::RecordFieldDef(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -218,8 +192,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::Module(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     it.short_label(),
                 ))

@@ -227,8 +200,7 @@ fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<Navigati
             ast::MacroCall(it) => {
                 Some(NavigationTarget::from_named(
                     db,
-                    node.file_id,
-                    &it,
+                    node.with_ast(&it),
                     it.doc_comment_text(),
                     None,
                 ))

@@ -193,7 +193,9 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
             None
         }
     } else if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
-        if let Some(name_kind) = classify_name(db, position.file_id, &name).map(|d| d.kind) {
+        if let Some(name_kind) =
+            classify_name(db, Source::new(position.file_id.into(), &name)).map(|d| d.kind)
+        {
            let mut _b: bool = true;
            res.extend(hover_text_from_name_kind(db, name_kind, &mut _b));
        }

@@ -41,6 +41,7 @@ mod matching_brace;
 mod display;
 mod inlay_hints;
 mod wasm_shims;
+mod expand;

 #[cfg(test)]
 mod marks;

@@ -110,7 +110,7 @@ fn find_name<'a>(
     position: FilePosition,
 ) -> Option<RangeInfo<(String, NameDefinition)>> {
     if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, position.offset) {
-        let def = classify_name(db, position.file_id, &name)?;
+        let def = classify_name(db, Source::new(position.file_id.into(), &name))?;
         let range = name.syntax().text_range();
         return Some(RangeInfo::new(range, (name.text().to_string(), def)));
     }

@@ -1,7 +1,6 @@
 //! Functions that are used to classify an element from its definition or reference.

 use hir::{FromSource, Module, ModuleSource, Path, PathResolution, Source, SourceAnalyzer};
-use ra_db::FileId;
 use ra_prof::profile;
 use ra_syntax::{ast, match_ast, AstNode};
 use test_utils::tested_by;

@@ -12,19 +11,14 @@ use super::{
 };
 use crate::db::RootDatabase;

-pub(crate) fn classify_name(
-    db: &RootDatabase,
-    file_id: FileId,
-    name: &ast::Name,
-) -> Option<NameDefinition> {
+pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Option<NameDefinition> {
     let _p = profile("classify_name");
-    let parent = name.syntax().parent()?;
-    let file_id = file_id.into();
+    let parent = name.ast.syntax().parent()?;

     match_ast! {
         match parent {
             ast::BindPat(it) => {
-                let src = hir::Source { file_id, ast: it };
+                let src = name.with_ast(it);
                 let local = hir::Local::from_source(db, src)?;
                 Some(NameDefinition {
                     visibility: None,

@@ -34,7 +28,7 @@ pub(crate) fn classify_name(
             },
             ast::RecordFieldDef(it) => {
                 let ast = hir::FieldSource::Named(it);
-                let src = hir::Source { file_id, ast };
+                let src = name.with_ast(ast);
                 let field = hir::StructField::from_source(db, src)?;
                 Some(from_struct_field(db, field))
             },

@@ -42,42 +36,42 @@ pub(crate) fn classify_name(
                 let def = {
                     if !it.has_semi() {
                         let ast = hir::ModuleSource::Module(it);
-                        let src = hir::Source { file_id, ast };
+                        let src = name.with_ast(ast);
                         hir::Module::from_definition(db, src)
                     } else {
-                        let src = hir::Source { file_id, ast: it };
+                        let src = name.with_ast(it);
                         hir::Module::from_declaration(db, src)
                     }
                 }?;
                 Some(from_module_def(db, def.into(), None))
             },
             ast::StructDef(it) => {
-                let src = hir::Source { file_id, ast: it };
+                let src = name.with_ast(it);
                 let def = hir::Struct::from_source(db, src)?;
                 Some(from_module_def(db, def.into(), None))
             },
             ast::EnumDef(it) => {
-                let src = hir::Source { file_id, ast: it };
+                let src = name.with_ast(it);
                 let def = hir::Enum::from_source(db, src)?;
                 Some(from_module_def(db, def.into(), None))
             },
             ast::TraitDef(it) => {
-                let src = hir::Source { file_id, ast: it };
+                let src = name.with_ast(it);
                 let def = hir::Trait::from_source(db, src)?;
                 Some(from_module_def(db, def.into(), None))
             },
             ast::StaticDef(it) => {
-                let src = hir::Source { file_id, ast: it };
+                let src = name.with_ast(it);
                 let def = hir::Static::from_source(db, src)?;
                 Some(from_module_def(db, def.into(), None))
             },
             ast::EnumVariant(it) => {
-                let src = hir::Source { file_id, ast: it };
+                let src = name.with_ast(it);
                 let def = hir::EnumVariant::from_source(db, src)?;
                 Some(from_module_def(db, def.into(), None))
             },
             ast::FnDef(it) => {
-                let src = hir::Source { file_id, ast: it };
+                let src = name.with_ast(it);
                 let def = hir::Function::from_source(db, src)?;
                 if parent.parent().and_then(ast::ItemList::cast).is_some() {
                     Some(from_assoc_item(db, def.into()))

@@ -86,7 +80,7 @@ pub(crate) fn classify_name(
                 }
             },
             ast::ConstDef(it) => {
-                let src = hir::Source { file_id, ast: it };
+                let src = name.with_ast(it);
                 let def = hir::Const::from_source(db, src)?;
                 if parent.parent().and_then(ast::ItemList::cast).is_some() {
                     Some(from_assoc_item(db, def.into()))

@@ -95,7 +89,7 @@ pub(crate) fn classify_name(
                 }
             },
             ast::TypeAliasDef(it) => {
-                let src = hir::Source { file_id, ast: it };
+                let src = name.with_ast(it);
                 let def = hir::TypeAlias::from_source(db, src)?;
                 if parent.parent().and_then(ast::ItemList::cast).is_some() {
                     Some(from_assoc_item(db, def.into()))

@@ -104,11 +98,11 @@ pub(crate) fn classify_name(
                 }
             },
             ast::MacroCall(it) => {
-                let src = hir::Source { file_id, ast: it};
+                let src = name.with_ast(it);
                 let def = hir::MacroDef::from_source(db, src.clone())?;

                 let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
-                let module = Module::from_definition(db, Source::new(file_id, module_src))?;
+                let module = Module::from_definition(db, src.with_ast(module_src))?;

                 Some(NameDefinition {
                     visibility: None,

@@ -94,7 +94,8 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
             }
             NAME => {
                 let name = node.as_node().cloned().and_then(ast::Name::cast).unwrap();
-                let name_kind = classify_name(db, file_id, &name).map(|d| d.kind);
+                let name_kind =
+                    classify_name(db, Source::new(file_id.into(), &name)).map(|d| d.kind);

                 if let Some(Local(local)) = &name_kind {
                     if let Some(name) = local.name(db) {

@@ -31,7 +31,7 @@ pub enum ExpandError {
 }

 pub use crate::syntax_bridge::{
-    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, RevTokenMap, TokenMap,
+    ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, TokenMap,
 };

 /// This struct contains AST for a single `macro_rules` definition. What might

@@ -14,30 +14,22 @@ use crate::ExpandError;
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct TokenMap {
     /// Maps `tt::TokenId` to the *relative* source range.
-    tokens: Vec<TextRange>,
-}
-
-/// Maps relative range of the expanded syntax node to `tt::TokenId`
-#[derive(Debug, PartialEq, Eq, Default)]
-pub struct RevTokenMap {
-    pub ranges: Vec<(TextRange, tt::TokenId)>,
+    entries: Vec<(tt::TokenId, TextRange)>,
 }

 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
 pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
-    let mut token_map = TokenMap::default();
-    let node = ast.syntax();
-    let tt = convert_tt(&mut token_map, node.text_range().start(), node)?;
-    Some((tt, token_map))
+    syntax_node_to_token_tree(ast.syntax())
 }

 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
 pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
-    let mut token_map = TokenMap::default();
-    let tt = convert_tt(&mut token_map, node.text_range().start(), node)?;
-    Some((tt, token_map))
+    let global_offset = node.text_range().start();
+    let mut c = Convertor { map: TokenMap::default(), global_offset, next_id: 0 };
+    let subtree = c.go(node)?;
+    Some((subtree, c.map))
 }

 // The following items are what `rustc` macro can be parsed into :

@@ -55,7 +47,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, Toke
 pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     fragment_kind: FragmentKind,
-) -> Result<(Parse<SyntaxNode>, RevTokenMap), ExpandError> {
+) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
     let tmp;
     let tokens = match tt {
         tt::Subtree { delimiter: tt::Delimiter::None, token_trees } => token_trees.as_slice(),

@@ -78,31 +70,17 @@ pub fn token_tree_to_syntax_node(

 impl TokenMap {
     pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let (idx, _) =
-            self.tokens.iter().enumerate().find(|(_, range)| **range == relative_range)?;
-        Some(tt::TokenId(idx as u32))
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| *range == relative_range)?;
+        Some(token_id)
     }

-    pub fn relative_range_of(&self, token_id: tt::TokenId) -> Option<TextRange> {
-        let idx = token_id.0 as usize;
-        self.tokens.get(idx).copied()
-    }
-
-    fn alloc(&mut self, relative_range: TextRange) -> tt::TokenId {
-        let id = self.tokens.len();
-        self.tokens.push(relative_range);
-        tt::TokenId(id as u32)
-    }
-}
-
-impl RevTokenMap {
     pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
-        let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
-        Some(r)
+        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
+        Some(range)
     }

-    fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
-        self.ranges.push((relative_range, token_id.clone()))
+    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+        self.entries.push((token_id, relative_range));
     }
 }

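With `RevTokenMap` gone, one `TokenMap` stores `(tt::TokenId, TextRange)` pairs and answers both questions: `token_by_range` for mapping down and `range_by_token` for mapping up. A standalone sketch of that contract, with plain tuples standing in for the real `TokenId` and `TextRange` types:

    struct MiniTokenMap {
        entries: Vec<(u32, (u32, u32))>, // (token id, relative range)
    }

    impl MiniTokenMap {
        fn token_by_range(&self, range: (u32, u32)) -> Option<u32> {
            self.entries.iter().find(|&&(_, r)| r == range).map(|&(id, _)| id)
        }
        fn range_by_token(&self, id: u32) -> Option<(u32, u32)> {
            self.entries.iter().find(|&&(i, _)| i == id).map(|&(_, r)| r)
        }
    }

    fn main() {
        let map = MiniTokenMap { entries: vec![(0, (0, 3)), (1, (4, 7))] };
        // The two lookups are inverses over the stored pairs.
        assert_eq!(map.token_by_range((4, 7)), Some(1));
        assert_eq!(map.range_by_token(1), Some((4, 7)));
    }
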
@@ -167,84 +145,98 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
     }
 }

-fn convert_tt(
-    token_map: &mut TokenMap,
-    global_offset: TextUnit,
-    tt: &SyntaxNode,
-) -> Option<tt::Subtree> {
-    // This tree is empty
-    if tt.first_child_or_token().is_none() {
-        return Some(tt::Subtree { token_trees: vec![], delimiter: tt::Delimiter::None });
-    }
-
-    let first_child = tt.first_child_or_token()?;
-    let last_child = tt.last_child_or_token()?;
-    let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
-        (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true),
-        (T!['{'], T!['}']) => (tt::Delimiter::Brace, true),
-        (T!['['], T![']']) => (tt::Delimiter::Bracket, true),
-        _ => (tt::Delimiter::None, false),
-    };
-
-    let mut token_trees = Vec::new();
-    let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
-
-    while let Some(child) = child_iter.next() {
-        if skip_first && (child == first_child || child == last_child) {
-            continue;
-        }
-
-        match child {
-            NodeOrToken::Token(token) => {
-                if let Some(doc_tokens) = convert_doc_comment(&token) {
-                    token_trees.extend(doc_tokens);
-                } else if token.kind().is_trivia() {
-                    continue;
-                } else if token.kind().is_punct() {
-                    assert!(token.text().len() == 1, "Input ast::token punct must be single char.");
-                    let char = token.text().chars().next().unwrap();
-
-                    let spacing = match child_iter.peek() {
-                        Some(NodeOrToken::Token(token)) => {
-                            if token.kind().is_punct() {
-                                tt::Spacing::Joint
-                            } else {
-                                tt::Spacing::Alone
-                            }
-                        }
-                        _ => tt::Spacing::Alone,
-                    };
-
-                    token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
-                } else {
-                    let child: tt::TokenTree =
-                        if token.kind() == T![true] || token.kind() == T![false] {
-                            tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                        } else if token.kind().is_keyword()
-                            || token.kind() == IDENT
-                            || token.kind() == LIFETIME
-                        {
-                            let relative_range = token.text_range() - global_offset;
-                            let id = token_map.alloc(relative_range);
-                            let text = token.text().clone();
-                            tt::Leaf::from(tt::Ident { text, id }).into()
-                        } else if token.kind().is_literal() {
-                            tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
-                        } else {
-                            return None;
-                        };
-                    token_trees.push(child);
-                }
-            }
-            NodeOrToken::Node(node) => {
-                let child = convert_tt(token_map, global_offset, &node)?.into();
-                token_trees.push(child);
-            }
-        };
-    }
-
-    let res = tt::Subtree { delimiter, token_trees };
-    Some(res)
-}
+struct Convertor {
+    map: TokenMap,
+    global_offset: TextUnit,
+    next_id: u32,
+}
+
+impl Convertor {
+    fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
+        // This tree is empty
+        if tt.first_child_or_token().is_none() {
+            return Some(tt::Subtree { token_trees: vec![], delimiter: tt::Delimiter::None });
+        }
+
+        let first_child = tt.first_child_or_token()?;
+        let last_child = tt.last_child_or_token()?;
+        let (delimiter, skip_first) = match (first_child.kind(), last_child.kind()) {
+            (T!['('], T![')']) => (tt::Delimiter::Parenthesis, true),
+            (T!['{'], T!['}']) => (tt::Delimiter::Brace, true),
+            (T!['['], T![']']) => (tt::Delimiter::Bracket, true),
+            _ => (tt::Delimiter::None, false),
+        };
+
+        let mut token_trees = Vec::new();
+        let mut child_iter = tt.children_with_tokens().skip(skip_first as usize).peekable();
+
+        while let Some(child) = child_iter.next() {
+            if skip_first && (child == first_child || child == last_child) {
+                continue;
+            }
+
+            match child {
+                NodeOrToken::Token(token) => {
+                    if let Some(doc_tokens) = convert_doc_comment(&token) {
+                        token_trees.extend(doc_tokens);
+                    } else if token.kind().is_trivia() {
+                        continue;
+                    } else if token.kind().is_punct() {
+                        assert!(
+                            token.text().len() == 1,
+                            "Input ast::token punct must be single char."
+                        );
+                        let char = token.text().chars().next().unwrap();
+
+                        let spacing = match child_iter.peek() {
+                            Some(NodeOrToken::Token(token)) => {
+                                if token.kind().is_punct() {
+                                    tt::Spacing::Joint
+                                } else {
+                                    tt::Spacing::Alone
+                                }
+                            }
+                            _ => tt::Spacing::Alone,
+                        };
+
+                        token_trees.push(tt::Leaf::from(tt::Punct { char, spacing }).into());
+                    } else {
+                        let child: tt::TokenTree =
+                            if token.kind() == T![true] || token.kind() == T![false] {
+                                tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+                            } else if token.kind().is_keyword()
+                                || token.kind() == IDENT
+                                || token.kind() == LIFETIME
+                            {
+                                let id = self.alloc(token.text_range());
+                                let text = token.text().clone();
+                                tt::Leaf::from(tt::Ident { text, id }).into()
+                            } else if token.kind().is_literal() {
+                                tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
+                            } else {
+                                return None;
+                            };
+                        token_trees.push(child);
+                    }
+                }
+                NodeOrToken::Node(node) => {
+                    let child = self.go(&node)?.into();
+                    token_trees.push(child);
+                }
+            };
+        }
+
+        let res = tt::Subtree { delimiter, token_trees };
+        Some(res)
+    }
+
+    fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
+        let relative_range = absolute_range - self.global_offset;
+        let token_id = tt::TokenId(self.next_id);
+        self.next_id += 1;
+        self.map.insert(token_id, relative_range);
+        token_id
+    }
+}

 struct TtTreeSink<'a> {

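Folding `convert_tt` into a `Convertor` bundles the state the old free function threaded through every recursive call (the map and the global offset) and adds a `next_id` counter, so token ids come from the conversion walk itself rather than from the map's length. A reduced sketch of the same state-bundling pattern (illustrative types, not the real module):

    struct Alloc {
        next_id: u32,
        entries: Vec<(u32, u32)>, // (token id, relative offset), standing in for TokenMap
        global_offset: u32,
    }

    impl Alloc {
        fn alloc(&mut self, absolute_offset: u32) -> u32 {
            // Offsets are recorded relative to the subtree being converted.
            let relative = absolute_offset - self.global_offset;
            let id = self.next_id;
            self.next_id += 1;
            self.entries.push((id, relative));
            id
        }
    }
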
@@ -252,7 +244,7 @@ struct TtTreeSink<'a> {
     cursor: Cursor<'a>,
     text_pos: TextUnit,
     inner: SyntaxTreeBuilder,
-    range_map: RevTokenMap,
+    token_map: TokenMap,

     // Number of roots
     // Use for detect ill-form tree which is not single root

@@ -267,12 +259,12 @@ impl<'a> TtTreeSink<'a> {
             text_pos: 0.into(),
             inner: SyntaxTreeBuilder::default(),
             roots: smallvec::SmallVec::new(),
-            range_map: RevTokenMap::default(),
+            token_map: TokenMap::default(),
         }
     }

-    fn finish(self) -> (Parse<SyntaxNode>, RevTokenMap) {
-        (self.inner.finish(), self.range_map)
+    fn finish(self) -> (Parse<SyntaxNode>, TokenMap) {
+        (self.inner.finish(), self.token_map)
     }
 }

@@ -308,7 +300,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
                 if kind == IDENT {
                     let range =
                         TextRange::offset_len(self.text_pos, TextUnit::of_str(&ident.text));
-                    self.range_map.add(range, ident.id);
+                    self.token_map.insert(ident.id, range);
                 }
             }

@@ -2,7 +2,7 @@
 //! Extensions for various expressions live in a sibling `expr_extensions` module.

 use crate::{
-    ast::{self, child_opt, children, AstChildren, AstNode, AttrInput, SyntaxNode},
+    ast::{self, child_opt, children, AstNode, AttrInput, SyntaxNode},
     SmolStr, SyntaxElement,
     SyntaxKind::*,
     SyntaxToken, T,

@@ -176,16 +176,6 @@ impl ast::ImplBlock {
     }
 }

-impl ast::AttrsOwner for ast::ImplItem {
-    fn attrs(&self) -> AstChildren<ast::Attr> {
-        match self {
-            ast::ImplItem::FnDef(it) => it.attrs(),
-            ast::ImplItem::TypeAliasDef(it) => it.attrs(),
-            ast::ImplItem::ConstDef(it) => it.attrs(),
-        }
-    }
-}
-
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum StructKind {
     Tuple(ast::TupleFieldDefList),

@@ -1298,6 +1298,7 @@ impl AstNode for ImplItem {
         }
     }
 }
+impl ast::AttrsOwner for ImplItem {}
 impl ImplItem {}
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct ImplTraitType {

@@ -401,7 +401,8 @@ Grammar(
         traits: ["AttrsOwner"]
     ),
     "ImplItem": (
-        enum: ["FnDef", "TypeAliasDef", "ConstDef"]
+        enum: ["FnDef", "TypeAliasDef", "ConstDef"],
+        traits: ["AttrsOwner"]
     ),

     "TupleExpr": (

@@ -55,7 +55,7 @@ We use Travis for CI. Most of the things, including formatting, are checked by
 be green as well. We use bors-ng to enforce the [not rocket
 science](https://graydon2.dreamwidth.org/1597.html) rule.

-You can run `cargo format-hook` to install git-hook to run rustfmt on commit.
+You can run `cargo xtask install-pre-commit-hook` to install git-hook to run rustfmt on commit.

 # Code organization

@@ -1,31 +0,0 @@
-//! FIXME: write short doc here
-
-use std::process::Command;
-
-use xtask::{codegen::Mode, project_root, run, run_rustfmt, Result};
-
-fn main() -> Result<()> {
-    run_rustfmt(Mode::Overwrite)?;
-    update_staged()
-}
-
-fn update_staged() -> Result<()> {
-    let root = project_root();
-    let output = Command::new("git")
-        .arg("diff")
-        .arg("--diff-filter=MAR")
-        .arg("--name-only")
-        .arg("--cached")
-        .current_dir(&root)
-        .output()?;
-    if !output.status.success() {
-        anyhow::bail!(
-            "`git diff --diff-filter=MAR --name-only --cached` exited with {}",
-            output.status
-        );
-    }
-    for line in String::from_utf8(output.stdout)?.lines() {
-        run(&format!("git update-index --add {}", root.join(line).to_string_lossy()), ".")?;
-    }
-    Ok(())
-}

@@ -10,7 +10,7 @@ FLAGS:

 SUBCOMMANDS:
     format
-    format-hook
+    install-pre-commit-hook
     fuzz-tests
     codegen
     install

@@ -83,19 +83,12 @@ pub fn install_rustfmt() -> Result<()> {
     run(&format!("rustup component add rustfmt --toolchain {}", TOOLCHAIN), ".")
 }

-pub fn install_format_hook() -> Result<()> {
-    let result_path = Path::new(if cfg!(windows) {
-        "./.git/hooks/pre-commit.exe"
-    } else {
-        "./.git/hooks/pre-commit"
-    });
+pub fn install_pre_commit_hook() -> Result<()> {
+    let result_path =
+        PathBuf::from(format!("./.git/hooks/pre-commit{}", std::env::consts::EXE_SUFFIX));
     if !result_path.exists() {
-        run("cargo build --package xtask --bin pre-commit", ".")?;
-        if cfg!(windows) {
-            fs::copy("./target/debug/pre-commit.exe", result_path)?;
-        } else {
-            fs::copy("./target/debug/pre-commit", result_path)?;
-        }
+        let me = std::env::current_exe()?;
+        fs::copy(me, result_path)?;
     } else {
         Err(IoError::new(ErrorKind::AlreadyExists, "Git hook already created"))?;
     }

@@ -150,6 +143,27 @@ pub fn run_fuzzer() -> Result<()> {
     run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax")
 }

+pub fn reformat_staged_files() -> Result<()> {
+    let root = project_root();
+    let output = Command::new("git")
+        .arg("diff")
+        .arg("--diff-filter=MAR")
+        .arg("--name-only")
+        .arg("--cached")
+        .current_dir(&root)
+        .output()?;
+    if !output.status.success() {
+        anyhow::bail!(
+            "`git diff --diff-filter=MAR --name-only --cached` exited with {}",
+            output.status
+        );
+    }
+    for line in String::from_utf8(output.stdout)?.lines() {
+        run(&format!("git update-index --add {}", root.join(line).to_string_lossy()), ".")?;
+    }
+    Ok(())
+}
+
 fn do_run<F>(cmdline: &str, dir: &str, mut f: F) -> Result<Output>
 where
     F: FnMut(&mut Command),

@@ -16,7 +16,8 @@ use pico_args::Arguments;
 use std::{env, path::PathBuf};
 use xtask::{
     codegen::{self, Mode},
-    install_format_hook, run, run_clippy, run_fuzzer, run_rustfmt, run_with_output, Cmd, Result,
+    install_pre_commit_hook, reformat_staged_files, run, run_clippy, run_fuzzer, run_rustfmt,
+    run_with_output, Cmd, Result,
 };

 // Latest stable, feel free to send a PR if this lags behind.

@@ -36,6 +37,10 @@ struct ServerOpt {
 }

 fn main() -> Result<()> {
+    if std::env::args().next().map(|it| it.contains("pre-commit")) == Some(true) {
+        return reformat_staged_files();
+    }
+
     let subcommand = match std::env::args_os().nth(1) {
         None => {
             eprintln!("{}", help::GLOBAL_HELP);

@@ -81,12 +86,12 @@ fn main() -> Result<()> {
             }
             run_rustfmt(Mode::Overwrite)?
         }
-        "format-hook" => {
+        "install-pre-commit-hook" => {
             if matches.contains(["-h", "--help"]) {
                 help::print_no_param_subcommand_help(&subcommand);
                 return Ok(());
             }
-            install_format_hook()?
+            install_pre_commit_hook()?
         }
        "lint" => {
            if matches.contains(["-h", "--help"]) {
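
The standalone pre-commit binary is gone: `install-pre-commit-hook` now copies the running xtask executable into `.git/hooks/pre-commit`, and the argv[0] check at the top of `main` makes that copy behave as the hook. A minimal standalone demonstration of the dispatch trick (messages invented):

    use std::env;

    fn main() {
        // When this executable runs from a copy named "pre-commit"
        // (e.g. .git/hooks/pre-commit), take the hook path instead of the CLI.
        if env::args().next().map(|it| it.contains("pre-commit")) == Some(true) {
            println!("acting as the git pre-commit hook");
            return;
        }
        println!("acting as the normal xtask CLI");
    }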