Merge branch 'master' of github.com:rust-analyzer/rust-analyzer

Benjamin Coenen committed 2020-04-21 14:32:02 +02:00
60 changed files with 1077 additions and 423 deletions

Cargo.lock (generated)

@@ -114,7 +114,7 @@ checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
 [[package]]
 name = "chalk-derive"
 version = "0.1.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -125,7 +125,7 @@ dependencies = [
 [[package]]
 name = "chalk-engine"
 version = "0.9.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
 dependencies = [
  "chalk-macros",
  "rustc-hash",
@@ -134,7 +134,7 @@ dependencies = [
 [[package]]
 name = "chalk-ir"
 version = "0.1.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
 dependencies = [
  "chalk-derive",
  "chalk-engine",
@@ -144,7 +144,7 @@ dependencies = [
 [[package]]
 name = "chalk-macros"
 version = "0.1.1"
-source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
 dependencies = [
  "lazy_static",
 ]
@@ -152,7 +152,7 @@ dependencies = [
 [[package]]
 name = "chalk-rust-ir"
 version = "0.1.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
 dependencies = [
  "chalk-derive",
  "chalk-engine",
@@ -163,7 +163,7 @@ dependencies = [
 [[package]]
 name = "chalk-solve"
 version = "0.1.0"
-source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8"
+source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
 dependencies = [
  "chalk-derive",
  "chalk-engine",
@@ -1288,9 +1288,9 @@ checksum = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84"
 [[package]]
 name = "regex"
-version = "1.3.6"
+version = "1.3.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f6946991529684867e47d86474e3a6d0c0ab9b82d5821e314b1ede31fa3a4b3"
+checksum = "a6020f034922e3194c711b82a627453881bc4682166cabb07134a10c26ba7692"
 dependencies = [
  "aho-corasick",
  "memchr",


@@ -98,7 +98,7 @@ fn already_has_from_impl(
     };
     let var_ty = hir_enum_var.fields(sema.db)[0].signature_ty(sema.db);
-    e_ty.impls_trait(sema.db, from_trait, &[var_ty.clone()])
+    e_ty.impls_trait(sema.db, from_trait, &[var_ty])
 }

 #[cfg(test)]


@@ -124,7 +124,7 @@ fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> {
             }
         }

-        if ast::Stmt::cast(node.clone().into()).is_some() {
+        if ast::Stmt::cast(node.clone()).is_some() {
             return Some((node, false));
         }


@@ -30,7 +30,7 @@ pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> {
         .filter_map(|dir| neighbor(&use_item, dir))
         .filter_map(|it| Some((it.clone(), it.use_tree()?)))
         .find_map(|(use_item, use_tree)| {
-            Some((try_merge_trees(&tree, &use_tree)?, use_item.clone()))
+            Some((try_merge_trees(&tree, &use_tree)?, use_item))
         })?;

     rewriter.replace_ast(&tree, &merged);


@@ -37,7 +37,7 @@ pub(crate) fn split_import(ctx: AssistCtx) -> Option<Assist> {
 #[cfg(test)]
 mod tests {
-    use crate::helpers::{check_assist, check_assist_target};
+    use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};

     use super::*;
@@ -63,4 +63,9 @@ mod tests {
     fn split_import_target() {
         check_assist_target(split_import, "use crate::<|>db::{RootDatabase, FileSymbol}", "::");
     }
+
+    #[test]
+    fn issue4044() {
+        check_assist_not_applicable(split_import, "use crate::<|>:::self;")
+    }
 }


@@ -235,7 +235,7 @@ fn parse_meta(meta: &str) -> ParsedMeta {
             "env" => {
                 for key in value.split(',') {
                     if let Some((k, v)) = split1(key, '=') {
-                        env.set(k.into(), v.into());
+                        env.set(k, v.into());
                     }
                 }
             }


@@ -327,7 +327,7 @@ impl ExternSource {
         self.extern_paths.iter().find_map(|(root_path, id)| {
             if let Ok(rel_path) = path.strip_prefix(root_path) {
                 let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
-                Some((id.clone(), rel_path))
+                Some((*id, rel_path))
             } else {
                 None
             }


@@ -759,6 +759,17 @@ impl MacroDef {
     pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
         self.source(db).value.name().map(|it| it.as_name())
     }
+
+    /// Indicates whether this is a proc-macro.
+    pub fn is_proc_macro(&self) -> bool {
+        match self.id.kind {
+            hir_expand::MacroDefKind::Declarative => false,
+            hir_expand::MacroDefKind::BuiltIn(_) => false,
+            hir_expand::MacroDefKind::BuiltInDerive(_) => false,
+            hir_expand::MacroDefKind::BuiltInEager(_) => false,
+            hir_expand::MacroDefKind::CustomDerive(_) => true,
+        }
+    }
 }

 /// Invariant: `inner.as_assoc_item(db).is_some()`


@@ -195,6 +195,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.analyze(field.syntax()).resolve_record_field(self.db, field)
     }

+    pub fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<StructField> {
+        self.analyze(field.syntax()).resolve_record_field_pat(self.db, field)
+    }
+
     pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
         let sa = self.analyze(macro_call.syntax());
         let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);


@@ -95,6 +95,7 @@ impl SourceAnalyzer {
     }

     fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
+        // FIXME: macros, see `expr_id`
         let src = InFile { file_id: self.file_id, value: pat };
         self.body_source_map.as_ref()?.node_pat(src)
     }
@@ -167,6 +168,16 @@ impl SourceAnalyzer {
         Some((struct_field.into(), local))
     }

+    pub(crate) fn resolve_record_field_pat(
+        &self,
+        _db: &dyn HirDatabase,
+        field: &ast::RecordFieldPat,
+    ) -> Option<StructField> {
+        let pat_id = self.pat_id(&field.pat()?)?;
+        let struct_field = self.infer.as_ref()?.record_field_pat_resolution(pat_id)?;
+        Some(struct_field.into())
+    }
+
     pub(crate) fn resolve_macro_call(
         &self,
         db: &dyn HirDatabase,


@@ -473,16 +473,14 @@ impl ExprCollector<'_> {
         self.collect_block_items(&block);
         let statements = block
             .statements()
-            .filter_map(|s| match s {
+            .map(|s| match s {
                 ast::Stmt::LetStmt(stmt) => {
                     let pat = self.collect_pat_opt(stmt.pat());
                     let type_ref = stmt.ascribed_type().map(TypeRef::from_ast);
                     let initializer = stmt.initializer().map(|e| self.collect_expr(e));
-                    Some(Statement::Let { pat, type_ref, initializer })
-                }
-                ast::Stmt::ExprStmt(stmt) => {
-                    Some(Statement::Expr(self.collect_expr_opt(stmt.expr())))
+                    Statement::Let { pat, type_ref, initializer }
                 }
+                ast::Stmt::ExprStmt(stmt) => Statement::Expr(self.collect_expr_opt(stmt.expr())),
             })
             .collect();
         let tail = block.expr().map(|e| self.collect_expr(e));


@@ -157,6 +157,10 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
             for arm in arms {
                 let scope = scopes.new_scope(scope);
                 scopes.add_bindings(body, scope, arm.pat);
+                if let Some(guard) = arm.guard {
+                    scopes.set_scope(guard, scope);
+                    compute_expr_scopes(guard, body, scopes, scope);
+                }
                 scopes.set_scope(arm.expr, scope);
                 compute_expr_scopes(arm.expr, body, scopes, scope);
             }
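
This hunk is what brings match-guard expressions into scope analysis: a guard must see the bindings introduced by its own arm's pattern. A small illustration in plain Rust (the function is invented for the example):

fn describe(x: Option<i32>) -> &'static str {
    match x {
        // `n` is bound by the arm's pattern; the guard `n > 0` can only be
        // resolved (and type-checked) if the guard expression shares the
        // scope that holds the arm's bindings.
        Some(n) if n > 0 => "positive",
        Some(_) => "zero or negative",
        None => "none",
    }
}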


@@ -66,7 +66,7 @@ impl AstIdMap {
         // change parent's id. This means that, say, adding a new function to a
         // trait does not change ids of top-level items, which helps caching.
         bfs(node, |it| {
-            if let Some(module_item) = ast::ModuleItem::cast(it.clone()) {
+            if let Some(module_item) = ast::ModuleItem::cast(it) {
                 res.alloc(module_item.syntax());
             }
         });
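
The comment in this hunk captures the design: `AstIdMap` hands out ids in breadth-first order, so items nested deeper never displace the ids of top-level items. A toy sketch of that numbering idea (not the real `AstIdMap` types):

use std::collections::VecDeque;

struct Item {
    name: &'static str,
    children: Vec<Item>,
}

/// Assigns ids in breadth-first order: every top-level item gets its id
/// before any nested item, so adding a function inside a trait cannot
/// renumber the top-level items.
fn bfs_ids(root: &Item) -> Vec<(usize, &'static str)> {
    let mut queue: VecDeque<&Item> = VecDeque::new();
    queue.push_back(root);
    let mut ids = Vec::new();
    while let Some(item) = queue.pop_front() {
        ids.push((ids.len(), item.name));
        queue.extend(item.children.iter());
    }
    ids
}

fn main() {
    let tree = Item {
        name: "root",
        children: vec![
            Item { name: "trait Foo", children: vec![Item { name: "fn nested", children: vec![] }] },
            Item { name: "fn top_level", children: vec![] },
        ],
    };
    // "fn top_level" keeps id 2 whether or not "fn nested" exists.
    println!("{:?}", bfs_ids(&tree));
}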


@@ -301,7 +301,7 @@ fn relative_file(db: &dyn AstDatabase, call_id: MacroCallId, path: &str) -> Opti
     }

     // Extern paths ?
-    let krate = db.relevant_crates(call_site).get(0)?.clone();
+    let krate = *db.relevant_crates(call_site).get(0)?;
     let (extern_source_id, relative_file) =
         db.crate_graph()[krate].extern_source.extern_path(path)?;
@@ -329,7 +329,7 @@ fn include_expand(
     // FIXME:
     // Handle include as expression
-    let res = parse_to_token_tree(&db.file_text(file_id.into()))
+    let res = parse_to_token_tree(&db.file_text(file_id))
         .ok_or_else(|| mbe::ExpandError::ConversionError)?
         .0;
@@ -340,7 +340,7 @@ fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Optio
     let call_id: MacroCallId = arg_id.into();
     let original_file = call_id.as_file().original_file(db);

-    let krate = db.relevant_crates(original_file).get(0)?.clone();
+    let krate = *db.relevant_crates(original_file).get(0)?;
     db.crate_graph()[krate].env.get(key)
 }
@@ -447,7 +447,7 @@ mod tests {
             file_id: file_id.into(),
         };

-        let id: MacroCallId = db.intern_eager_expansion(eager.into()).into();
+        let id: MacroCallId = db.intern_eager_expansion(eager).into();
         id.as_file()
     }
 };


@@ -27,9 +27,9 @@ test_utils = { path = "../test_utils" }
 scoped-tls = "1"

-chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" }
-chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" }
-chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" }
+chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "2c072cc830d04af5f10b390e6643327f85108282" }
+chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "2c072cc830d04af5f10b390e6643327f85108282" }
+chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "2c072cc830d04af5f10b390e6643327f85108282" }

 [dev-dependencies]
 insta = "0.16.0"


@@ -235,10 +235,19 @@ impl From<PatId> for PatIdOrWild {
     }
 }

+impl From<&PatId> for PatIdOrWild {
+    fn from(pat_id: &PatId) -> Self {
+        Self::PatId(*pat_id)
+    }
+}
+
 #[derive(Debug, Clone, Copy, PartialEq)]
 pub enum MatchCheckErr {
     NotImplemented,
     MalformedMatchArm,
+    /// Used when type inference cannot resolve the type of
+    /// a pattern or expression.
+    Unknown,
 }

 /// The return type of `is_useful` is either an indication of usefulness
@@ -290,10 +299,14 @@ impl PatStack {
         Self::from_slice(&self.0[1..])
     }

-    fn replace_head_with<T: Into<PatIdOrWild> + Copy>(&self, pat_ids: &[T]) -> PatStack {
+    fn replace_head_with<I, T>(&self, pats: I) -> PatStack
+    where
+        I: Iterator<Item = T>,
+        T: Into<PatIdOrWild>,
+    {
         let mut patterns: PatStackInner = smallvec![];
-        for pat in pat_ids {
-            patterns.push((*pat).into());
+        for pat in pats {
+            patterns.push(pat.into());
         }
         for pat in &self.0[1..] {
             patterns.push(*pat);
@@ -330,7 +343,7 @@ impl PatStack {
                     return Err(MatchCheckErr::NotImplemented);
                 }

-                Some(self.replace_head_with(pat_ids))
+                Some(self.replace_head_with(pat_ids.iter()))
             }
             (Pat::Lit(lit_expr), Constructor::Bool(constructor_val)) => {
                 match cx.body.exprs[lit_expr] {
@@ -382,7 +395,7 @@ impl PatStack {
                         new_patterns.push((*pat_id).into());
                     }

-                    Some(self.replace_head_with(&new_patterns))
+                    Some(self.replace_head_with(new_patterns.into_iter()))
                 } else {
                     return Err(MatchCheckErr::MalformedMatchArm);
                 }
@@ -390,13 +403,41 @@ impl PatStack {
                     // If there is no ellipsis in the tuple pattern, the number
                     // of patterns must equal the constructor arity.
                     if pat_ids.len() == constructor_arity {
-                        Some(self.replace_head_with(pat_ids))
+                        Some(self.replace_head_with(pat_ids.into_iter()))
                     } else {
                         return Err(MatchCheckErr::MalformedMatchArm);
                     }
                 }
             }
+            (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => {
+                let pat_id = self.head().as_id().expect("we know this isn't a wild");
+                if !enum_variant_matches(cx, pat_id, *e) {
+                    None
+                } else {
+                    match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
+                        VariantData::Record(struct_field_arena) => {
+                            // Here we treat any missing fields in the record as the wild pattern, as
+                            // if the record has ellipsis. We want to do this here even if the
+                            // record does not contain ellipsis, because it allows us to continue
+                            // enforcing exhaustiveness for the rest of the match statement.
+                            //
+                            // Creating the diagnostic for the missing field in the pattern
+                            // should be done in a different diagnostic.
+                            let patterns = struct_field_arena.iter().map(|(_, struct_field)| {
+                                arg_patterns
+                                    .iter()
+                                    .find(|pat| pat.name == struct_field.name)
+                                    .map(|pat| PatIdOrWild::from(pat.pat))
+                                    .unwrap_or(PatIdOrWild::Wild)
+                            });
+
+                            Some(self.replace_head_with(patterns))
+                        }
+                        _ => return Err(MatchCheckErr::Unknown),
+                    }
+                }
+            }
             (Pat::Or(_), _) => return Err(MatchCheckErr::NotImplemented),
             (_, _) => return Err(MatchCheckErr::NotImplemented),
         };
@@ -655,8 +696,8 @@ impl Constructor {
             Constructor::Enum(e) => {
                 match cx.db.enum_data(e.parent).variants[e.local_id].variant_data.as_ref() {
                     VariantData::Tuple(struct_field_data) => struct_field_data.len(),
+                    VariantData::Record(struct_field_data) => struct_field_data.len(),
                     VariantData::Unit => 0,
-                    _ => return Err(MatchCheckErr::NotImplemented),
                 }
             }
         };
@@ -695,10 +736,10 @@ fn pat_constructor(cx: &MatchCheckCtx, pat: PatIdOrWild) -> MatchCheckResult<Opt
             Expr::Literal(Literal::Bool(val)) => Some(Constructor::Bool(val)),
             _ => return Err(MatchCheckErr::NotImplemented),
         },
-        Pat::TupleStruct { .. } | Pat::Path(_) => {
+        Pat::TupleStruct { .. } | Pat::Path(_) | Pat::Record { .. } => {
            let pat_id = pat.as_id().expect("we already know this pattern is not a wild");
            let variant_id =
-                cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::NotImplemented)?;
+                cx.infer.variant_resolution_for_pat(pat_id).ok_or(MatchCheckErr::Unknown)?;
             match variant_id {
                 VariantId::EnumVariantId(enum_variant_id) => {
                     Some(Constructor::Enum(enum_variant_id))
@@ -759,20 +800,22 @@ mod tests {
     pub(super) use insta::assert_snapshot;
     pub(super) use ra_db::fixture::WithFixture;

-    pub(super) use crate::test_db::TestDB;
+    pub(super) use crate::{diagnostics::MissingMatchArms, test_db::TestDB};

     pub(super) fn check_diagnostic_message(content: &str) -> String {
-        TestDB::with_single_file(content).0.diagnostics().0
+        TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().0
     }

     pub(super) fn check_diagnostic(content: &str) {
-        let diagnostic_count = TestDB::with_single_file(content).0.diagnostics().1;
+        let diagnostic_count =
+            TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().1;

         assert_eq!(1, diagnostic_count, "no diagnostic reported");
     }

     pub(super) fn check_no_diagnostic(content: &str) {
-        let diagnostic_count = TestDB::with_single_file(content).0.diagnostics().1;
+        let diagnostic_count =
+            TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().1;

         assert_eq!(0, diagnostic_count, "expected no diagnostic, found one");
     }
@@ -1531,6 +1574,236 @@ mod tests {
         check_no_diagnostic(content);
     }

+    #[test]
+    fn enum_record_no_arms() {
+        let content = r"
+            enum Either {
+                A { foo: bool },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::A { foo: true };
+                match a {
+                }
+            }
+        ";
+        check_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_missing_arms() {
+        let content = r"
+            enum Either {
+                A { foo: bool },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::A { foo: true };
+                match a {
+                    Either::A { foo: true } => (),
+                }
+            }
+        ";
+        check_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_no_diagnostic() {
+        let content = r"
+            enum Either {
+                A { foo: bool },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::A { foo: true };
+                match a {
+                    Either::A { foo: true } => (),
+                    Either::A { foo: false } => (),
+                    Either::B => (),
+                }
+            }
+        ";
+        check_no_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_missing_field_no_diagnostic() {
+        let content = r"
+            enum Either {
+                A { foo: bool },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::B;
+                match a {
+                    Either::A { } => (),
+                    Either::B => (),
+                }
+            }
+        ";
+        // When `Either::A` is missing a struct member, we don't want
+        // to fire the missing match arm diagnostic. This should fire
+        // some other diagnostic.
+        check_no_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_missing_field_missing_match_arm() {
+        let content = r"
+            enum Either {
+                A { foo: bool },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::B;
+                match a {
+                    Either::A { } => (),
+                }
+            }
+        ";
+        // Even though `Either::A` is missing fields, we still want to fire
+        // the missing arm diagnostic here, since we know `Either::B` is missing.
+        check_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_no_diagnostic_wild() {
+        let content = r"
+            enum Either {
+                A { foo: bool },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::A { foo: true };
+                match a {
+                    Either::A { foo: _ } => (),
+                    Either::B => (),
+                }
+            }
+        ";
+        check_no_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_fields_out_of_order_missing_arm() {
+        let content = r"
+            enum Either {
+                A { foo: bool, bar: () },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::A { foo: true };
+                match a {
+                    Either::A { bar: (), foo: false } => (),
+                    Either::A { foo: true, bar: () } => (),
+                }
+            }
+        ";
+        check_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_fields_out_of_order_no_diagnostic() {
+        let content = r"
+            enum Either {
+                A { foo: bool, bar: () },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::A { foo: true };
+                match a {
+                    Either::A { bar: (), foo: false } => (),
+                    Either::A { foo: true, bar: () } => (),
+                    Either::B => (),
+                }
+            }
+        ";
+        check_no_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_ellipsis_missing_arm() {
+        let content = r"
+            enum Either {
+                A { foo: bool, bar: bool },
+                B,
+            }
+            fn test_fn() {
+                match Either::B {
+                    Either::A { foo: true, .. } => (),
+                    Either::B => (),
+                }
+            }
+        ";
+        check_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_ellipsis_no_diagnostic() {
+        let content = r"
+            enum Either {
+                A { foo: bool, bar: bool },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::A { foo: true };
+                match a {
+                    Either::A { foo: true, .. } => (),
+                    Either::A { foo: false, .. } => (),
+                    Either::B => (),
+                }
+            }
+        ";
+        check_no_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_ellipsis_all_fields_missing_arm() {
+        let content = r"
+            enum Either {
+                A { foo: bool, bar: bool },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::B;
+                match a {
+                    Either::A { .. } => (),
+                }
+            }
+        ";
+        check_diagnostic(content);
+    }
+
+    #[test]
+    fn enum_record_ellipsis_all_fields_no_diagnostic() {
+        let content = r"
+            enum Either {
+                A { foo: bool, bar: bool },
+                B,
+            }
+            fn test_fn() {
+                let a = Either::B;
+                match a {
+                    Either::A { .. } => (),
+                    Either::B => (),
+                }
+            }
+        ";
+        check_no_diagnostic(content);
+    }
+
     #[test]
     fn enum_tuple_partial_ellipsis_no_diagnostic() {
         let content = r"
@@ -1688,25 +1961,6 @@ mod false_negatives {
         check_no_diagnostic(content);
     }

-    #[test]
-    fn enum_record() {
-        let content = r"
-            enum Either {
-                A { foo: u32 },
-                B,
-            }
-            fn test_fn() {
-                match Either::B {
-                    Either::A { foo: 5 } => (),
-                }
-            }
-        ";
-        // This is a false negative.
-        // We don't currently handle enum record types.
-        check_no_diagnostic(content);
-    }
-
     #[test]
     fn internal_or() {
         let content = r"
@@ -1796,4 +2050,22 @@ mod false_negatives {
         // We don't currently handle tuple patterns with ellipsis.
         check_no_diagnostic(content);
     }
+
+    #[test]
+    fn struct_missing_arm() {
+        let content = r"
+            struct Foo {
+                a: bool,
+            }
+            fn test_fn(f: Foo) {
+                match f {
+                    Foo { a: true } => {},
+                }
+            }
+        ";
+        // This is a false negative.
+        // We don't currently handle structs.
+        check_no_diagnostic(content);
+    }
 }
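
Together these hunks teach the exhaustiveness checker about record (struct-like) enum variants, with any field not mentioned in a pattern treated as a wildcard. A compact example of the behaviour the new tests pin down, in plain Rust that compiles as-is:

enum Either {
    A { foo: bool, bar: bool },
    B,
}

fn demo(e: Either) {
    match e {
        // The checker expands `A { foo: true, .. }` to `A { foo: true, bar: _ }`,
        // so the two `A` arms together cover every `A` value; without the
        // final `Either::B` arm the match would be flagged as missing arms.
        Either::A { foo: true, .. } => (),
        Either::A { foo: false, .. } => (),
        Either::B => (),
    }
}

fn main() {
    demo(Either::B);
}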


@@ -107,6 +107,13 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
         krate: CrateId,
         goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>,
     ) -> Option<crate::traits::Solution>;
+
+    #[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)]
+    fn program_clauses_for_chalk_env(
+        &self,
+        krate: CrateId,
+        env: chalk_ir::Environment<chalk::Interner>,
+    ) -> chalk_ir::ProgramClauses<chalk::Interner>;
 }

 fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {


@@ -127,6 +127,7 @@ pub struct InferenceResult {
     field_resolutions: FxHashMap<ExprId, StructFieldId>,
     /// For each field in record literal, records the field it resolves to.
     record_field_resolutions: FxHashMap<ExprId, StructFieldId>,
+    record_field_pat_resolutions: FxHashMap<PatId, StructFieldId>,
     /// For each struct literal, records the variant it resolves to.
     variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
     /// For each associated item record what it resolves to
@@ -147,6 +148,9 @@ impl InferenceResult {
     pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructFieldId> {
         self.record_field_resolutions.get(&expr).copied()
     }
+    pub fn record_field_pat_resolution(&self, pat: PatId) -> Option<StructFieldId> {
+        self.record_field_pat_resolutions.get(&pat).copied()
+    }
     pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
         self.variant_resolutions.get(&id.into()).copied()
     }


@@ -7,6 +7,7 @@ use hir_def::{
     expr::{BindingAnnotation, Pat, PatId, RecordFieldPat},
     path::Path,
     type_ref::Mutability,
+    StructFieldId,
 };
 use hir_expand::name::Name;
 use test_utils::tested_by;
@@ -67,6 +68,11 @@ impl<'a> InferenceContext<'a> {
         let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
         for subpat in subpats {
             let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
+            if let Some(local_id) = matching_field {
+                let field_def = StructFieldId { parent: def.unwrap(), local_id };
+                self.result.record_field_pat_resolutions.insert(subpat.pat, field_def);
+            }
+
             let expected_ty =
                 matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
             let expected_ty = self.normalize_associated_types_in(expected_ty);


@@ -12,7 +12,7 @@ use ra_db::{
 };
 use stdx::format_to;

-use crate::{db::HirDatabase, expr::ExprValidator};
+use crate::{db::HirDatabase, diagnostics::Diagnostic, expr::ExprValidator};

 #[salsa::database(
     ra_db::SourceDatabaseExtStorage,
@@ -104,10 +104,7 @@ impl TestDB {
         panic!("Can't find module for file")
     }

-    // FIXME: don't duplicate this
-    pub fn diagnostics(&self) -> (String, u32) {
-        let mut buf = String::new();
-        let mut count = 0;
+    fn diag<F: FnMut(&dyn Diagnostic)>(&self, mut cb: F) {
         let crate_graph = self.crate_graph();
         for krate in crate_graph.iter() {
             let crate_def_map = self.crate_def_map(krate);
@@ -132,15 +129,36 @@ impl TestDB {
             for f in fns {
                 let infer = self.infer(f.into());
-                let mut sink = DiagnosticSink::new(|d| {
-                    format_to!(buf, "{:?}: {}\n", d.syntax_node(self).text(), d.message());
-                    count += 1;
-                });
+                let mut sink = DiagnosticSink::new(&mut cb);
                 infer.add_diagnostics(self, f, &mut sink);
                 let mut validator = ExprValidator::new(f, infer, &mut sink);
                 validator.validate_body(self);
             }
         }
+    }
+
+    pub fn diagnostics(&self) -> (String, u32) {
+        let mut buf = String::new();
+        let mut count = 0;
+        self.diag(|d| {
+            format_to!(buf, "{:?}: {}\n", d.syntax_node(self).text(), d.message());
+            count += 1;
+        });
+        (buf, count)
+    }
+
+    /// Like `diagnostics`, but filtered for a single diagnostic.
+    pub fn diagnostic<D: Diagnostic>(&self) -> (String, u32) {
+        let mut buf = String::new();
+        let mut count = 0;
+        self.diag(|d| {
+            // We want to filter diagnostics by the particular one we are testing for, to
+            // avoid surprising results in tests.
+            if d.downcast_ref::<D>().is_some() {
+                format_to!(buf, "{:?}: {}\n", d.syntax_node(self).text(), d.message());
+                count += 1;
+            };
+        });
         (buf, count)
     }
 }


@@ -455,3 +455,29 @@ fn test() {
     "###
     );
 }
+
+#[test]
+fn infer_guard() {
+    assert_snapshot!(
+        infer(r#"
+struct S;
+impl S { fn foo(&self) -> bool { false } }
+
+fn main() {
+    match S {
+        s if s.foo() => (),
+    }
+}
+"#), @"
+    [28; 32) 'self': &S
+    [42; 51) '{ false }': bool
+    [44; 49) 'false': bool
+    [65; 116) '{ ... } }': ()
+    [71; 114) 'match ... }': ()
+    [77; 78) 'S': S
+    [89; 90) 's': S
+    [94; 95) 's': S
+    [94; 101) 's.foo()': bool
+    [105; 107) '()': ()
+    ")
+}


@@ -225,7 +225,7 @@ fn solution_from_chalk(
                 None => unimplemented!(),
             })
             .collect();
-        let result = Canonical { value, num_vars: subst.binders.len() };
+        let result = Canonical { value, num_vars: subst.binders.len(&Interner) };
         SolutionVariables(result)
     };

     match solution {


@@ -4,8 +4,8 @@ use std::{fmt, sync::Arc};
 use log::debug;

 use chalk_ir::{
-    cast::Cast, fold::shift::Shift, Goal, GoalData, Parameter, PlaceholderIndex, TypeName,
-    UniverseIndex,
+    cast::Cast, fold::shift::Shift, interner::HasInterner, Goal, GoalData, Parameter,
+    PlaceholderIndex, TypeName, UniverseIndex,
 };

 use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId};
@@ -33,8 +33,10 @@ impl chalk_ir::interner::Interner for Interner {
     type InternedGoals = Vec<Goal<Self>>;
     type InternedSubstitution = Vec<Parameter<Self>>;
     type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
-    type InternedProgramClauses = Vec<chalk_ir::ProgramClause<Self>>;
+    type InternedProgramClauses = Arc<[chalk_ir::ProgramClause<Self>]>;
     type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>;
+    type InternedParameterKinds = Vec<chalk_ir::ParameterKind<()>>;
+    type InternedCanonicalVarKinds = Vec<chalk_ir::ParameterKind<UniverseIndex>>;
     type Identifier = TypeAliasId;
     type DefId = InternId;
@@ -60,6 +62,27 @@ impl chalk_ir::interner::Interner for Interner {
         tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt)))
     }

+    fn debug_projection_ty(
+        proj: &chalk_ir::ProjectionTy<Interner>,
+        fmt: &mut fmt::Formatter<'_>,
+    ) -> Option<fmt::Result> {
+        tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt)))
+    }
+
+    fn debug_opaque_ty(
+        opaque_ty: &chalk_ir::OpaqueTy<Interner>,
+        fmt: &mut fmt::Formatter<'_>,
+    ) -> Option<fmt::Result> {
+        tls::with_current_program(|prog| Some(prog?.debug_opaque_ty(opaque_ty, fmt)))
+    }
+
+    fn debug_opaque_ty_id(
+        opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
+        fmt: &mut fmt::Formatter<'_>,
+    ) -> Option<fmt::Result> {
+        tls::with_current_program(|prog| Some(prog?.debug_opaque_ty_id(opaque_ty_id, fmt)))
+    }
+
     fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
         tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt)))
     }
@@ -202,15 +225,15 @@ impl chalk_ir::interner::Interner for Interner {
     fn intern_program_clauses(
         &self,
         data: impl IntoIterator<Item = chalk_ir::ProgramClause<Self>>,
-    ) -> Vec<chalk_ir::ProgramClause<Self>> {
+    ) -> Arc<[chalk_ir::ProgramClause<Self>]> {
         data.into_iter().collect()
     }

     fn program_clauses_data<'a>(
         &self,
-        clauses: &'a Vec<chalk_ir::ProgramClause<Self>>,
+        clauses: &'a Arc<[chalk_ir::ProgramClause<Self>]>,
     ) -> &'a [chalk_ir::ProgramClause<Self>] {
-        clauses
+        &clauses
     }

     fn intern_quantified_where_clauses(
@@ -226,6 +249,34 @@ impl chalk_ir::interner::Interner for Interner {
     ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
         clauses
     }
+
+    fn intern_parameter_kinds(
+        &self,
+        data: impl IntoIterator<Item = chalk_ir::ParameterKind<()>>,
+    ) -> Self::InternedParameterKinds {
+        data.into_iter().collect()
+    }
+
+    fn parameter_kinds_data<'a>(
+        &self,
+        parameter_kinds: &'a Self::InternedParameterKinds,
+    ) -> &'a [chalk_ir::ParameterKind<()>] {
+        &parameter_kinds
+    }
+
+    fn intern_canonical_var_kinds(
+        &self,
+        data: impl IntoIterator<Item = chalk_ir::ParameterKind<UniverseIndex>>,
+    ) -> Self::InternedCanonicalVarKinds {
+        data.into_iter().collect()
+    }
+
+    fn canonical_var_kinds_data<'a>(
+        &self,
+        canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
+    ) -> &'a [chalk_ir::ParameterKind<UniverseIndex>] {
+        &canonical_var_kinds
+    }
 }

 impl chalk_ir::interner::HasInterner for Interner {
@@ -268,7 +319,10 @@ impl ToChalk for Ty {
             Ty::Projection(proj_ty) => {
                 let associated_ty_id = proj_ty.associated_ty.to_chalk(db);
                 let substitution = proj_ty.parameters.to_chalk(db);
-                chalk_ir::AliasTy { associated_ty_id, substitution }
+                chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy {
+                    associated_ty_id,
+                    substitution,
+                })
                 .cast(&Interner)
                 .intern(&Interner)
             }
@@ -314,16 +368,17 @@ impl ToChalk for Ty {
                 );
                 Ty::Placeholder(db.lookup_intern_type_param_id(interned_id))
             }
-            chalk_ir::TyData::Alias(proj) => {
+            chalk_ir::TyData::Alias(chalk_ir::AliasTy::Projection(proj)) => {
                 let associated_ty = from_chalk(db, proj.associated_ty_id);
                 let parameters = from_chalk(db, proj.substitution);
                 Ty::Projection(ProjectionTy { associated_ty, parameters })
             }
+            chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(_)) => unimplemented!(),
             chalk_ir::TyData::Function(_) => unimplemented!(),
             chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx),
             chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown,
             chalk_ir::TyData::Dyn(where_clauses) => {
-                assert_eq!(where_clauses.bounds.binders.len(), 1);
+                assert_eq!(where_clauses.bounds.binders.len(&Interner), 1);
                 let predicates = where_clauses
                     .bounds
                     .skip_binders()
@@ -404,6 +459,7 @@ impl ToChalk for TypeCtor {
         match type_name {
             TypeName::Struct(struct_id) => db.lookup_intern_type_ctor(struct_id.into()),
             TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)),
+            TypeName::OpaqueType(_) => unreachable!(),
             TypeName::Error => {
                 // this should not be reached, since we don't represent TypeName::Error with TypeCtor
                 unreachable!()
@@ -460,7 +516,8 @@ impl ToChalk for GenericPredicate {
             }
             GenericPredicate::Projection(projection_pred) => {
                 let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner);
-                let alias = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner);
+                let projection = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner);
+                let alias = chalk_ir::AliasTy::Projection(projection);
                 make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0)
             }
             GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"),
@@ -481,7 +538,13 @@ impl ToChalk for GenericPredicate {
                 GenericPredicate::Implemented(from_chalk(db, tr))
             }
             chalk_ir::WhereClause::AliasEq(projection_eq) => {
-                let projection_ty = from_chalk(db, projection_eq.alias);
+                let projection_ty = from_chalk(
+                    db,
+                    match projection_eq.alias {
+                        chalk_ir::AliasTy::Projection(p) => p,
+                        _ => unimplemented!(),
+                    },
+                );
                 let ty = from_chalk(db, projection_eq.ty);
                 GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty })
             }
@@ -490,10 +553,10 @@ impl ToChalk for GenericPredicate {
 }

 impl ToChalk for ProjectionTy {
-    type Chalk = chalk_ir::AliasTy<Interner>;
+    type Chalk = chalk_ir::ProjectionTy<Interner>;

-    fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasTy<Interner> {
-        chalk_ir::AliasTy {
+    fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::ProjectionTy<Interner> {
+        chalk_ir::ProjectionTy {
             associated_ty_id: self.associated_ty.to_chalk(db),
             substitution: self.parameters.to_chalk(db),
         }
@@ -501,7 +564,7 @@ impl ToChalk for ProjectionTy {
     fn from_chalk(
         db: &dyn HirDatabase,
-        projection_ty: chalk_ir::AliasTy<Interner>,
+        projection_ty: chalk_ir::ProjectionTy<Interner>,
     ) -> ProjectionTy {
         ProjectionTy {
             associated_ty: from_chalk(db, projection_ty.associated_ty_id),
@@ -514,7 +577,10 @@ impl ToChalk for super::ProjectionPredicate {
     type Chalk = chalk_ir::AliasEq<Interner>;

     fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> {
-        chalk_ir::AliasEq { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) }
+        chalk_ir::AliasEq {
+            alias: chalk_ir::AliasTy::Projection(self.projection_ty.to_chalk(db)),
+            ty: self.ty.to_chalk(db),
+        }
     }

     fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self {
@@ -540,17 +606,24 @@ impl ToChalk for Obligation {
 impl<T> ToChalk for Canonical<T>
 where
     T: ToChalk,
+    T::Chalk: HasInterner<Interner = Interner>,
 {
     type Chalk = chalk_ir::Canonical<T::Chalk>;

     fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> {
         let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT);
         let value = self.value.to_chalk(db);
-        chalk_ir::Canonical { value, binders: vec![parameter; self.num_vars] }
+        chalk_ir::Canonical {
+            value,
+            binders: chalk_ir::CanonicalVarKinds::from(&Interner, vec![parameter; self.num_vars]),
+        }
     }

     fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> {
-        Canonical { num_vars: canonical.binders.len(), value: from_chalk(db, canonical.value) }
+        Canonical {
+            num_vars: canonical.binders.len(&Interner),
+            value: from_chalk(db, canonical.value),
+        }
     }
 }
@@ -649,9 +722,15 @@ impl ToChalk for builtin::BuiltinImplAssocTyValueData {
     }
 }

-fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> {
+fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T>
+where
+    T: HasInterner<Interner = Interner>,
+{
     chalk_ir::Binders::new(
-        std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars).collect(),
+        chalk_ir::ParameterKinds::from(
+            &Interner,
+            std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars),
+        ),
         value,
     )
 }
@@ -799,6 +878,28 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
         // FIXME tell Chalk about well-known traits (here and in trait_datum)
         None
     }
+
+    fn program_clauses_for_env(
+        &self,
+        environment: &chalk_ir::Environment<Interner>,
+    ) -> chalk_ir::ProgramClauses<Interner> {
+        self.db.program_clauses_for_chalk_env(self.krate, environment.clone())
+    }
+
+    fn opaque_ty_data(
+        &self,
+        _id: chalk_ir::OpaqueTyId<Interner>,
+    ) -> Arc<chalk_rust_ir::OpaqueTyDatum<Interner>> {
+        unimplemented!()
+    }
+}
+
+pub(crate) fn program_clauses_for_chalk_env_query(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+    environment: chalk_ir::Environment<Interner>,
+) -> chalk_ir::ProgramClauses<Interner> {
+    chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment)
 }

 pub(crate) fn associated_ty_data_query(


@@ -121,19 +121,38 @@ impl DebugContext<'_> {
         write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
     }

-    pub fn debug_alias(
+    pub fn debug_opaque_ty_id(
         &self,
-        alias: &AliasTy<Interner>,
+        opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
         fmt: &mut fmt::Formatter<'_>,
     ) -> Result<(), fmt::Error> {
-        let type_alias: TypeAliasId = from_chalk(self.0, alias.associated_ty_id);
+        fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish()
+    }
+
+    pub fn debug_alias(
+        &self,
+        alias_ty: &AliasTy<Interner>,
+        fmt: &mut fmt::Formatter<'_>,
+    ) -> Result<(), fmt::Error> {
+        match alias_ty {
+            AliasTy::Projection(projection_ty) => self.debug_projection_ty(projection_ty, fmt),
+            AliasTy::Opaque(opaque_ty) => self.debug_opaque_ty(opaque_ty, fmt),
+        }
+    }
+
+    pub fn debug_projection_ty(
+        &self,
+        projection_ty: &chalk_ir::ProjectionTy<Interner>,
+        fmt: &mut fmt::Formatter<'_>,
+    ) -> Result<(), fmt::Error> {
+        let type_alias: TypeAliasId = from_chalk(self.0, projection_ty.associated_ty_id);
         let type_alias_data = self.0.type_alias_data(type_alias);
         let trait_ = match type_alias.lookup(self.0.upcast()).container {
             AssocContainerId::TraitId(t) => t,
             _ => panic!("associated type not in trait"),
         };
         let trait_data = self.0.trait_data(trait_);
-        let params = alias.substitution.parameters(&Interner);
+        let params = projection_ty.substitution.parameters(&Interner);
         write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
         if params.len() > 1 {
             write!(
@@ -145,6 +164,14 @@ impl DebugContext<'_> {
         write!(fmt, ">::{}", type_alias_data.name)
     }

+    pub fn debug_opaque_ty(
+        &self,
+        opaque_ty: &chalk_ir::OpaqueTy<Interner>,
+        fmt: &mut fmt::Formatter<'_>,
+    ) -> Result<(), fmt::Error> {
+        write!(fmt, "{:?}", opaque_ty.opaque_ty_id)
+    }
+
     pub fn debug_ty(
         &self,
         ty: &chalk_ir::Ty<Interner>,


@@ -161,6 +161,12 @@ impl Completions {
         name: Option<String>,
         macro_: hir::MacroDef,
     ) {
+        // FIXME: proc-macros currently don't have an AST node,
+        // so they have no source to show
+        if macro_.is_proc_macro() {
+            return;
+        }
+
         let name = match name {
             Some(it) => it,
             None => return,


@@ -96,7 +96,7 @@ fn try_extend_selection(
         return Some(node.text_range());
     }

-    let node = shallowest_node(&node.into());
+    let node = shallowest_node(&node);

     if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
         if let Some(range) = extend_list_item(&node) {


@@ -62,10 +62,9 @@ pub(crate) enum ReferenceResult {
 impl ReferenceResult {
     fn to_vec(self) -> Vec<NavigationTarget> {
-        use self::ReferenceResult::*;
-
         match self {
-            Exact(target) => vec![target],
-            Approximate(vec) => vec,
+            ReferenceResult::Exact(target) => vec![target],
+            ReferenceResult::Approximate(vec) => vec,
         }
     }
 }
@@ -74,8 +73,6 @@ pub(crate) fn reference_definition(
     sema: &Semantics<RootDatabase>,
     name_ref: &ast::NameRef,
 ) -> ReferenceResult {
-    use self::ReferenceResult::*;
-
     let name_kind = classify_name_ref(sema, name_ref);
     if let Some(def) = name_kind {
         let def = def.definition();
@@ -91,7 +88,7 @@ pub(crate) fn reference_definition(
         .into_iter()
         .map(|s| s.to_nav(sema.db))
         .collect();
-    Approximate(navs)
+    ReferenceResult::Approximate(navs)
 }

 #[cfg(test)]
@@ -398,6 +395,25 @@ mod tests {
         );
     }

+    #[test]
+    fn goto_def_for_record_pat_fields() {
+        covers!(ra_ide_db::goto_def_for_record_field_pats);
+        check_goto(
+            r"
+            //- /lib.rs
+            struct Foo {
+                spam: u32,
+            }
+            fn bar(foo: Foo) -> Foo {
+                let Foo { spam<|>: _, } = foo
+            }
+            ",
+            "spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)",
+            "spam: u32|spam",
+        );
+    }
+
     #[test]
     fn goto_def_for_record_fields_macros() {
         check_goto(


@@ -50,12 +50,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 <span class="keyword">fn</span> <span class="function declaration">main</span>() {
     <span class="macro">println!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>);

-    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new();
+    <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = <span class="unresolved_reference">Vec</span>::<span class="unresolved_reference">new</span>();
     <span class="keyword control">if</span> <span class="keyword">true</span> {
         <span class="keyword">let</span> <span class="variable declaration">x</span> = <span class="numeric_literal">92</span>;
-        <span class="variable mutable">vec</span>.push(<span class="struct">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="numeric_literal">1</span> });
+        <span class="variable mutable">vec</span>.<span class="unresolved_reference">push</span>(<span class="struct">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="numeric_literal">1</span> });
     }
-    <span class="keyword unsafe">unsafe</span> { <span class="variable mutable">vec</span>.set_len(<span class="numeric_literal">0</span>); }
+    <span class="keyword unsafe">unsafe</span> { <span class="variable mutable">vec</span>.<span class="unresolved_reference">set_len</span>(<span class="numeric_literal">0</span>); }

     <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">x</span> = <span class="numeric_literal">42</span>;
     <span class="keyword">let</span> <span class="variable declaration mutable">y</span> = &<span class="keyword">mut</span> <span class="variable mutable">x</span>;


@@ -28,11 +28,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
 </style>
 <pre><code><span class="keyword">fn</span> <span class="function declaration">main</span>() {
     <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string_literal">"hello"</span>;
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.<span class="unresolved_reference">to_string</span>();
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string();
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.<span class="unresolved_reference">to_string</span>();

     <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string_literal">"other color please!"</span>;
-    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.to_string();
+    <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.<span class="unresolved_reference">to_string</span>();
 }

 <span class="keyword">fn</span> <span class="function declaration">bar</span>() {


@@ -239,9 +239,8 @@ fn highlight_element(
         NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => return None,
         NAME_REF => {
             let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap();
-            let name_kind = classify_name_ref(sema, &name_ref)?;
-
-            match name_kind {
+            match classify_name_ref(sema, &name_ref) {
+                Some(name_kind) => match name_kind {
                     NameRefClass::Definition(def) => {
                         if let Definition::Local(local) = &def {
                             if let Some(name) = local.name(db) {
@@ -253,6 +252,8 @@ fn highlight_element(
                         highlight_name(db, def)
                     }
                     NameRefClass::FieldShorthand { .. } => HighlightTag::Field.into(),
+                },
+                None => HighlightTag::UnresolvedReference.into(),
             }
         }


@@ -38,6 +38,7 @@ pub enum HighlightTag {
     TypeParam,
     Union,
     Local,
+    UnresolvedReference,
 }

 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
@@ -79,6 +80,7 @@ impl HighlightTag {
             HighlightTag::TypeParam => "type_param",
             HighlightTag::Union => "union",
             HighlightTag::Local => "variable",
+            HighlightTag::UnresolvedReference => "unresolved_reference",
         }
     }
 }


@@ -180,6 +180,7 @@ fn classify_name_inner(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Opti
} }
} }
#[derive(Debug)]
pub enum NameRefClass { pub enum NameRefClass {
Definition(Definition), Definition(Definition),
FieldShorthand { local: Local, field: Definition }, FieldShorthand { local: Local, field: Definition },
@@ -229,6 +230,14 @@ pub fn classify_name_ref(
} }
} }
if let Some(record_field_pat) = ast::RecordFieldPat::cast(parent.clone()) {
tested_by!(goto_def_for_record_field_pats; force);
if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) {
let field = Definition::StructField(field);
return Some(NameRefClass::Definition(field));
}
}
if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
tested_by!(goto_def_for_macros; force); tested_by!(goto_def_for_macros; force);
if let Some(macro_def) = sema.resolve_macro_call(&macro_call) { if let Some(macro_def) = sema.resolve_macro_call(&macro_call) {
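For context, this is the kind of plain Rust code the new `RecordFieldPat` branch targets (an ordinary snippet, not part of the patch): goto-definition on the names in the pattern now resolves to the struct fields.

struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    // `x` and `y` below are record field patterns; with the change above,
    // each classifies as a struct-field definition, so goto-definition
    // jumps to the declarations in `Point`.
    let Point { x, y } = p;
    println!("{} {}", x, y);
}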

View File

@@ -6,5 +6,6 @@ test_utils::marks![
goto_def_for_fields goto_def_for_fields
goto_def_for_record_fields goto_def_for_record_fields
goto_def_for_field_init_shorthand goto_def_for_field_init_shorthand
goto_def_for_record_field_pats
search_filters_by_range search_filters_by_range
]; ];

View File

@@ -187,7 +187,11 @@ impl<'a> TtIter<'a> {
_ => false, _ => false,
}, },
Separator::Literal(lhs) => match fork.expect_literal() { Separator::Literal(lhs) => match fork.expect_literal() {
Ok(rhs) => rhs.text == lhs.text, Ok(rhs) => match rhs {
tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
tt::Leaf::Punct(_) => false,
},
_ => false, _ => false,
}, },
Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() { Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() {
@@ -202,6 +206,13 @@ impl<'a> TtIter<'a> {
} }
pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> { pub(crate) fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
match self.peek_n(0) {
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) if punct.char == '\'' => {
return self.expect_lifetime();
}
_ => (),
}
let tt = self.next().ok_or_else(|| ())?.clone(); let tt = self.next().ok_or_else(|| ())?.clone();
let punct = match tt { let punct = match tt {
tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => { tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.spacing == tt::Spacing::Joint => {
@@ -255,13 +266,21 @@ impl<'a> TtIter<'a> {
} }
} }
pub(crate) fn expect_lifetime(&mut self) -> Result<&tt::Ident, ()> { pub(crate) fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
let ident = self.expect_ident()?; let punct = self.expect_punct()?;
// check if it start from "`" if punct.char != '\'' {
if !ident.text.starts_with('\'') {
return Err(()); return Err(());
} }
Ok(ident) let ident = self.expect_ident()?;
Ok(tt::Subtree {
delimiter: None,
token_trees: vec![
tt::Leaf::Punct(punct.clone()).into(),
tt::Leaf::Ident(ident.clone()).into(),
],
}
.into())
} }
pub(crate) fn expect_fragment( pub(crate) fn expect_fragment(
@@ -274,7 +293,10 @@ impl<'a> TtIter<'a> {
} }
impl<'a> TreeSink for OffsetTokenSink<'a> { impl<'a> TreeSink for OffsetTokenSink<'a> {
fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) { fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
if kind == SyntaxKind::LIFETIME {
n_tokens = 2;
}
for _ in 0..n_tokens { for _ in 0..n_tokens {
self.cursor = self.cursor.bump_subtree(); self.cursor = self.cursor.bump_subtree();
} }
@@ -286,7 +308,7 @@ impl<'a> TtIter<'a> {
} }
} }
let buffer = TokenBuffer::new(self.inner.as_slice()); let buffer = TokenBuffer::new(&self.inner.as_slice());
let mut src = SubtreeTokenSource::new(&buffer); let mut src = SubtreeTokenSource::new(&buffer);
let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false }; let mut sink = OffsetTokenSink { cursor: buffer.begin(), error: false };
@@ -422,7 +444,7 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen
"tt" => input.expect_tt().map(Some).map_err(|()| err!()), "tt" => input.expect_tt().map(Some).map_err(|()| err!()),
"lifetime" => input "lifetime" => input
.expect_lifetime() .expect_lifetime()
.map(|ident| Some(tt::Leaf::Ident(ident.clone()).into())) .map(|tt| Some(tt))
.map_err(|()| err!("expected lifetime")), .map_err(|()| err!("expected lifetime")),
"literal" => input "literal" => input
.expect_literal() .expect_literal()
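The representation these hunks settle on, modeled with simplified local types (not `ra_tt`'s): a lifetime such as `'static` becomes two leaves, a joint `'` punct immediately followed by an ident, wrapped in a delimiter-less subtree.

#[derive(Debug)]
enum Leaf {
    Punct { ch: char, joint: bool },
    Ident(String),
}

#[derive(Debug)]
struct Subtree {
    delimiter: Option<char>,
    token_trees: Vec<Leaf>,
}

fn lifetime(name: &str) -> Subtree {
    Subtree {
        delimiter: None,
        token_trees: vec![
            // `joint: true` records that the apostrophe glues to the ident.
            Leaf::Punct { ch: '\'', joint: true },
            Leaf::Ident(name.to_string()),
        ],
    }
}

fn main() {
    println!("{:?}", lifetime("static"));
}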

View File

@@ -50,6 +50,26 @@ impl<'a> SubtreeTokenSource<'a> {
} }
fn get(&self, pos: usize) -> Ref<Option<TtToken>> { fn get(&self, pos: usize) -> Ref<Option<TtToken>> {
fn is_lifetime(c: Cursor) -> Option<(Cursor, SmolStr)> {
let tkn = c.token_tree();
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = tkn {
if punct.char == '\'' {
let next = c.bump();
if let Some(tt::TokenTree::Leaf(tt::Leaf::Ident(ident))) = next.token_tree() {
let res_cursor = next.bump();
let text = SmolStr::new("'".to_string() + &ident.to_string());
return Some((res_cursor, text));
} else {
panic!("Next token must be ident : {:#?}", next.token_tree());
}
}
}
None
}
if pos < self.cached.borrow().len() { if pos < self.cached.borrow().len() {
return Ref::map(self.cached.borrow(), |c| &c[pos]); return Ref::map(self.cached.borrow(), |c| &c[pos]);
} }
@@ -63,6 +83,12 @@ impl<'a> SubtreeTokenSource<'a> {
continue; continue;
} }
if let Some((curr, text)) = is_lifetime(cursor) {
cached.push(Some(TtToken { kind: LIFETIME, is_joint_to_next: false, text }));
self.cached_cursor.set(curr);
continue;
}
match cursor.token_tree() { match cursor.token_tree() {
Some(tt::TokenTree::Leaf(leaf)) => { Some(tt::TokenTree::Leaf(leaf)) => {
cached.push(Some(convert_leaf(&leaf))); cached.push(Some(convert_leaf(&leaf)));
@@ -132,27 +158,28 @@ fn convert_literal(l: &tt::Literal) -> TtToken {
let kind = lex_single_syntax_kind(&l.text) let kind = lex_single_syntax_kind(&l.text)
.map(|(kind, _error)| kind) .map(|(kind, _error)| kind)
.filter(|kind| kind.is_literal()) .filter(|kind| kind.is_literal())
.unwrap_or_else(|| match l.text.as_ref() { .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
"true" => T![true],
"false" => T![false],
_ => panic!("Fail to convert given literal {:#?}", &l),
});
TtToken { kind, is_joint_to_next: false, text: l.text.clone() } TtToken { kind, is_joint_to_next: false, text: l.text.clone() }
} }
fn convert_ident(ident: &tt::Ident) -> TtToken { fn convert_ident(ident: &tt::Ident) -> TtToken {
let kind = if ident.text.starts_with('\'') { let kind = match ident.text.as_ref() {
LIFETIME "true" => T![true],
} else { "false" => T![false],
SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT) i if i.starts_with('\'') => LIFETIME,
_ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT),
}; };
TtToken { kind, is_joint_to_next: false, text: ident.text.clone() } TtToken { kind, is_joint_to_next: false, text: ident.text.clone() }
} }
fn convert_punct(p: tt::Punct) -> TtToken { fn convert_punct(p: tt::Punct) -> TtToken {
let kind = SyntaxKind::from_char(p.char).unwrap(); let kind = match SyntaxKind::from_char(p.char) {
None => panic!("{:#?} is not a valid punct", p),
Some(kind) => kind,
};
let text = { let text = {
let mut buf = [0u8; 4]; let mut buf = [0u8; 4];
let s: &str = p.char.encode_utf8(&mut buf); let s: &str = p.char.encode_utf8(&mut buf);
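The inverse direction, as a toy function: when re-lexing token trees into syntax, an apostrophe punct followed by an ident is glued into a single lifetime text, which is roughly what `is_lifetime` above does with cursors.

fn glue_lifetime(tokens: &[&str]) -> Option<String> {
    match tokens {
        // an apostrophe punct immediately followed by an ident
        ["'", ident, ..] => Some(format!("'{}", ident)),
        _ => None,
    }
}

fn main() {
    assert_eq!(glue_lifetime(&["'", "static"]), Some("'static".to_string()));
    assert_eq!(glue_lifetime(&["fn"]), None);
}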

View File

@@ -271,7 +271,7 @@ struct RawConvertor<'a> {
inner: std::slice::Iter<'a, RawToken>, inner: std::slice::Iter<'a, RawToken>,
} }
trait SrcToken { trait SrcToken: std::fmt::Debug {
fn kind(&self) -> SyntaxKind; fn kind(&self) -> SyntaxKind;
fn to_char(&self) -> Option<char>; fn to_char(&self) -> Option<char>;
@@ -361,8 +361,12 @@ trait TokenConvertor {
Some(next) if next.kind().is_punct() => tt::Spacing::Joint, Some(next) if next.kind().is_punct() => tt::Spacing::Joint,
_ => tt::Spacing::Alone, _ => tt::Spacing::Alone,
}; };
let char = token.to_char().expect("Token from lexer must be single char"); let char = match token.to_char() {
Some(c) => c,
None => {
panic!("Token from lexer must be single char: token = {:#?}", token);
}
};
tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into() tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc().alloc(range) }).into()
} }
} else { } else {
@@ -372,10 +376,29 @@ trait TokenConvertor {
}; };
} }
let leaf: tt::Leaf = match k { let leaf: tt::Leaf = match k {
T![true] | T![false] => make_leaf!(Literal), T![true] | T![false] => make_leaf!(Ident),
IDENT | LIFETIME => make_leaf!(Ident), IDENT => make_leaf!(Ident),
k if k.is_keyword() => make_leaf!(Ident), k if k.is_keyword() => make_leaf!(Ident),
k if k.is_literal() => make_leaf!(Literal), k if k.is_literal() => make_leaf!(Literal),
LIFETIME => {
let char_unit = TextUnit::from_usize(1);
let r = TextRange::offset_len(range.start(), char_unit);
let apostrophe = tt::Leaf::from(tt::Punct {
char: '\'',
spacing: tt::Spacing::Joint,
id: self.id_alloc().alloc(r),
});
result.push(apostrophe.into());
let r =
TextRange::offset_len(range.start() + char_unit, range.len() - char_unit);
let ident = tt::Leaf::from(tt::Ident {
text: SmolStr::new(&token.to_text()[1..]),
id: self.id_alloc().alloc(r),
});
result.push(ident.into());
return;
}
_ => return, _ => return,
}; };
@@ -455,6 +478,7 @@ impl Convertor {
} }
} }
#[derive(Debug)]
enum SynToken { enum SynToken {
Ordiniary(SyntaxToken), Ordiniary(SyntaxToken),
Punch(SyntaxToken, TextUnit), Punch(SyntaxToken, TextUnit),
@@ -592,11 +616,14 @@ fn delim_to_str(d: Option<tt::DelimiterKind>, closing: bool) -> SmolStr {
} }
impl<'a> TreeSink for TtTreeSink<'a> { impl<'a> TreeSink for TtTreeSink<'a> {
fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
if kind == L_DOLLAR || kind == R_DOLLAR { if kind == L_DOLLAR || kind == R_DOLLAR {
self.cursor = self.cursor.bump_subtree(); self.cursor = self.cursor.bump_subtree();
return; return;
} }
if kind == LIFETIME {
n_tokens = 2;
}
let mut last = self.cursor; let mut last = self.cursor;
for _ in 0..n_tokens { for _ in 0..n_tokens {

View File

@@ -214,6 +214,33 @@ SUBTREE $
); );
} }
#[test]
fn test_lifetime_split() {
parse_macro(
r#"
macro_rules! foo {
($($t:tt)*) => { $($t)*}
}
"#,
)
.assert_expand(
r#"foo!(static bar: &'static str = "hello";);"#,
r#"
SUBTREE $
IDENT static 17
IDENT bar 18
PUNCH : [alone] 19
PUNCH & [alone] 20
PUNCH ' [joint] 21
IDENT static 22
IDENT str 23
PUNCH = [alone] 24
LITERAL "hello" 25
PUNCH ; [joint] 26
"#,
);
}
#[test] #[test]
fn test_expr_order() { fn test_expr_order() {
let expanded = parse_macro( let expanded = parse_macro(
@@ -988,6 +1015,36 @@ fn test_literal() {
.assert_expand_items(r#"foo!(u8 0);"#, r#"const VALUE : u8 = 0 ;"#); .assert_expand_items(r#"foo!(u8 0);"#, r#"const VALUE : u8 = 0 ;"#);
} }
#[test]
fn test_boolean_is_ident() {
parse_macro(
r#"
macro_rules! foo {
($lit0:literal, $lit1:literal) => { const VALUE: (bool,bool) = ($lit0,$lit1); };
}
"#,
)
.assert_expand(
r#"foo!(true,false);"#,
r#"
SUBTREE $
IDENT const 14
IDENT VALUE 15
PUNCH : [alone] 16
SUBTREE () 17
IDENT bool 18
PUNCH , [alone] 19
IDENT bool 20
PUNCH = [alone] 21
SUBTREE () 22
IDENT true 29
PUNCH , [joint] 25
IDENT false 31
PUNCH ; [alone] 28
"#,
);
}
#[test] #[test]
fn test_vis() { fn test_vis() {
parse_macro( parse_macro(

View File

@@ -40,9 +40,11 @@ impl<'a> TtIter<'a> {
} }
} }
pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Literal, ()> { pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> {
match self.expect_leaf()? { let it = self.expect_leaf()?;
tt::Leaf::Literal(it) => Ok(it), match it {
tt::Leaf::Literal(_) => Ok(it),
tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it),
_ => Err(()), _ => Err(()),
} }
} }
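A minimal sketch of why this special case exists (local enum, not `ra_tt`): `true` and `false` lex as idents at the token-tree level, yet `$x:literal` fragments must accept them.

enum Leaf {
    Literal(&'static str),
    Ident(&'static str),
    Punct(char),
}

fn expect_literal(leaf: &Leaf) -> Result<&Leaf, ()> {
    match leaf {
        Leaf::Literal(_) => Ok(leaf),
        // booleans are idents, but count as literals here
        Leaf::Ident(text) if *text == "true" || *text == "false" => Ok(leaf),
        _ => Err(()),
    }
}

fn main() {
    assert!(expect_literal(&Leaf::Literal("42")).is_ok());
    assert!(expect_literal(&Leaf::Ident("true")).is_ok());
    assert!(expect_literal(&Leaf::Punct(';')).is_err());
}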

View File

@@ -13,6 +13,7 @@ use process::{ProcMacroProcessSrv, ProcMacroProcessThread};
use ra_tt::{SmolStr, Subtree}; use ra_tt::{SmolStr, Subtree};
use std::{ use std::{
ffi::OsStr, ffi::OsStr,
io,
path::{Path, PathBuf}, path::{Path, PathBuf},
sync::Arc, sync::Arc,
}; };
@@ -57,14 +58,10 @@ pub struct ProcMacroClient {
} }
impl ProcMacroClient { impl ProcMacroClient {
pub fn extern_process<I, S>( pub fn extern_process(
process_path: &Path, process_path: PathBuf,
args: I, args: impl IntoIterator<Item = impl AsRef<OsStr>>,
) -> Result<ProcMacroClient, std::io::Error> ) -> io::Result<ProcMacroClient> {
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?; let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?;
Ok(ProcMacroClient { Ok(ProcMacroClient {
kind: ProcMacroClientKind::Process { process: Arc::new(process), thread }, kind: ProcMacroClientKind::Process { process: Arc::new(process), thread },
@@ -84,7 +81,7 @@ impl ProcMacroClient {
ProcMacroClientKind::Process { process, .. } => { ProcMacroClientKind::Process { process, .. } => {
let macros = match process.find_proc_macros(dylib_path) { let macros = match process.find_proc_macros(dylib_path) {
Err(err) => { Err(err) => {
eprintln!("Fail to find proc macro. Error: {:#?}", err); eprintln!("Failed to find proc macros. Error: {:#?}", err);
return vec![]; return vec![];
} }
Ok(macros) => macros, Ok(macros) => macros,
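The signature change above swaps the explicit `where` clauses for `impl Trait` in argument position and takes the path by value. A self-contained sketch of that style; the body here is a hypothetical stand-in that just echoes its inputs, not the real spawn logic.

use std::ffi::{OsStr, OsString};
use std::io;
use std::path::PathBuf;

fn extern_process(
    process_path: PathBuf,
    args: impl IntoIterator<Item = impl AsRef<OsStr>>,
) -> io::Result<(PathBuf, Vec<OsString>)> {
    let args = args.into_iter().map(|s| s.as_ref().to_owned()).collect();
    Ok((process_path, args))
}

fn main() -> io::Result<()> {
    // Both a `&str` array and a `Vec<String>` satisfy the bound.
    let _ = extern_process(PathBuf::from("server"), ["proc-macro"])?;
    let _ = extern_process(PathBuf::from("server"), vec![String::from("proc-macro")])?;
    Ok(())
}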

View File

@@ -1,4 +1,4 @@
//! Defines messages for cross-process message based on `ndjson` wire protocol //! Defines messages for cross-process message passing based on `ndjson` wire protocol
use std::{ use std::{
convert::TryFrom, convert::TryFrom,
@@ -31,7 +31,7 @@ macro_rules! impl_try_from_response {
fn try_from(value: Response) -> Result<Self, Self::Error> { fn try_from(value: Response) -> Result<Self, Self::Error> {
match value { match value {
Response::$tag(res) => Ok(res), Response::$tag(res) => Ok(res),
_ => Err("Fail to convert from response"), _ => Err(concat!("Failed to convert response to ", stringify!($tag))),
} }
} }
} }
@@ -53,18 +53,16 @@ pub enum ErrorCode {
ExpansionError, ExpansionError,
} }
pub trait Message: Sized + Serialize + DeserializeOwned { pub trait Message: Serialize + DeserializeOwned {
fn read(r: &mut impl BufRead) -> io::Result<Option<Self>> { fn read(inp: &mut impl BufRead) -> io::Result<Option<Self>> {
let text = match read_json(r)? { Ok(match read_json(inp)? {
None => return Ok(None), None => None,
Some(text) => text, Some(text) => Some(serde_json::from_str(&text)?),
}; })
let msg = serde_json::from_str(&text)?;
Ok(Some(msg))
} }
fn write(self, w: &mut impl Write) -> io::Result<()> { fn write(self, out: &mut impl Write) -> io::Result<()> {
let text = serde_json::to_string(&self)?; let text = serde_json::to_string(&self)?;
write_json(w, &text) write_json(out, &text)
} }
} }
@@ -73,15 +71,12 @@ impl Message for Response {}
fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> { fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> {
let mut buf = String::new(); let mut buf = String::new();
if inp.read_line(&mut buf)? == 0 { inp.read_line(&mut buf)?;
return Ok(None); buf.pop(); // Remove trailing '\n'
} Ok(match buf.len() {
// Remove ending '\n' 0 => None,
let buf = &buf[..buf.len() - 1]; _ => Some(buf),
if buf.is_empty() { })
return Ok(None);
}
Ok(Some(buf.to_string()))
} }
fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
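A toy illustration of the ndjson framing this module implements (simplified: plain strings in place of `Request`/`Response`): one JSON value per line, with an empty line or EOF reading as `None`.

use std::io::{BufRead, Write};

fn write_json(out: &mut impl Write, msg: &str) -> std::io::Result<()> {
    writeln!(out, "{}", msg)
}

fn read_json(inp: &mut impl BufRead) -> std::io::Result<Option<String>> {
    let mut buf = String::new();
    inp.read_line(&mut buf)?;
    buf.pop(); // remove trailing '\n'
    Ok(if buf.is_empty() { None } else { Some(buf) })
}

fn main() -> std::io::Result<()> {
    let mut wire = Vec::new();
    write_json(&mut wire, r#"{"req":"ListMacro"}"#)?;
    let mut reader = &wire[..];
    assert_eq!(read_json(&mut reader)?.as_deref(), Some(r#"{"req":"ListMacro"}"#));
    assert_eq!(read_json(&mut reader)?, None); // EOF
    Ok(())
}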

View File

@@ -9,7 +9,7 @@ use crate::rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTas
use io::{BufRead, BufReader}; use io::{BufRead, BufReader};
use std::{ use std::{
convert::{TryFrom, TryInto}, convert::{TryFrom, TryInto},
ffi::OsStr, ffi::{OsStr, OsString},
io::{self, Write}, io::{self, Write},
path::{Path, PathBuf}, path::{Path, PathBuf},
process::{Child, Command, Stdio}, process::{Child, Command, Stdio},
@@ -28,66 +28,11 @@ pub(crate) struct ProcMacroProcessThread {
handle: jod_thread::JoinHandle<()>, handle: jod_thread::JoinHandle<()>,
} }
struct Task {
req: Request,
result_tx: Sender<Option<Response>>,
}
struct Process {
path: PathBuf,
child: Child,
}
impl Drop for Process {
fn drop(&mut self) {
let _ = self.child.kill();
}
}
impl Process {
fn run<I, S>(process_path: &Path, args: I) -> Result<Process, io::Error>
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let child = Command::new(process_path.clone())
.args(args)
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::null())
.spawn()?;
Ok(Process { path: process_path.into(), child })
}
fn restart(&mut self) -> Result<(), io::Error> {
let _ = self.child.kill();
self.child = Command::new(self.path.clone())
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::null())
.spawn()?;
Ok(())
}
fn stdio(&mut self) -> Option<(impl Write, impl BufRead)> {
let stdin = self.child.stdin.take()?;
let stdout = self.child.stdout.take()?;
let read = BufReader::new(stdout);
Some((stdin, read))
}
}
impl ProcMacroProcessSrv { impl ProcMacroProcessSrv {
pub fn run<I, S>( pub fn run(
process_path: &Path, process_path: PathBuf,
args: I, args: impl IntoIterator<Item = impl AsRef<OsStr>>,
) -> Result<(ProcMacroProcessThread, ProcMacroProcessSrv), io::Error> ) -> io::Result<(ProcMacroProcessThread, ProcMacroProcessSrv)> {
where
I: IntoIterator<Item = S>,
S: AsRef<OsStr>,
{
let process = Process::run(process_path, args)?; let process = Process::run(process_path, args)?;
let (task_tx, task_rx) = bounded(0); let (task_tx, task_rx) = bounded(0);
@@ -197,11 +142,62 @@ fn client_loop(task_rx: Receiver<Task>, mut process: Process) {
} }
} }
struct Task {
req: Request,
result_tx: Sender<Option<Response>>,
}
struct Process {
path: PathBuf,
args: Vec<OsString>,
child: Child,
}
impl Drop for Process {
fn drop(&mut self) {
let _ = self.child.kill();
}
}
impl Process {
fn run(
path: PathBuf,
args: impl IntoIterator<Item = impl AsRef<OsStr>>,
) -> io::Result<Process> {
let args = args.into_iter().map(|s| s.as_ref().into()).collect();
let child = mk_child(&path, &args)?;
Ok(Process { path, args, child })
}
fn restart(&mut self) -> io::Result<()> {
let _ = self.child.kill();
self.child = mk_child(&self.path, &self.args)?;
Ok(())
}
fn stdio(&mut self) -> Option<(impl Write, impl BufRead)> {
let stdin = self.child.stdin.take()?;
let stdout = self.child.stdout.take()?;
let read = BufReader::new(stdout);
Some((stdin, read))
}
}
fn mk_child(path: &Path, args: impl IntoIterator<Item = impl AsRef<OsStr>>) -> io::Result<Child> {
Command::new(&path)
.args(args)
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.stderr(Stdio::null())
.spawn()
}
fn send_request( fn send_request(
mut writer: &mut impl Write, mut writer: &mut impl Write,
mut reader: &mut impl BufRead, mut reader: &mut impl BufRead,
req: Request, req: Request,
) -> Result<Option<Response>, io::Error> { ) -> io::Result<Option<Response>> {
req.write(&mut writer)?; req.write(&mut writer)?;
Ok(Response::read(&mut reader)?) Ok(Response::read(&mut reader)?)
} }

View File

@@ -1,9 +1,9 @@
//! Data struture serialization related stuffs for RPC //! Data structure serialization related stuff for RPC
//! //!
//! Define all necessary rpc serialization data structure, //! Defines all necessary rpc serialization data structures,
//! which include ra_tt related data and some task messages. //! which includes `ra_tt` related data and some task messages.
//! Although adding Serialize and Deserialize trait to ra_tt directly seem to be much easier, //! Although adding `Serialize` and `Deserialize` traits to `ra_tt` directly seems
//! we deliberately duplicate the ra_tt struct with #[serde(with = "XXDef")] //! to be much easier, we deliberately duplicate `ra_tt` structs with `#[serde(with = "XXDef")]`
//! for separation of code responsibility. //! for separation of code responsibility.
use ra_tt::{ use ra_tt::{
@@ -34,15 +34,15 @@ pub struct ListMacrosResult {
pub struct ExpansionTask { pub struct ExpansionTask {
/// Argument of macro call. /// Argument of macro call.
/// ///
/// In custom derive that would be a struct or enum; in attribute-like macro - underlying /// In custom derive this will be a struct or enum; in attribute-like macro - underlying
/// item; in function-like macro - the macro body. /// item; in function-like macro - the macro body.
#[serde(with = "SubtreeDef")] #[serde(with = "SubtreeDef")]
pub macro_body: Subtree, pub macro_body: Subtree,
/// Names of macros to expand. /// Name of macro to expand.
/// ///
/// In custom derive those are names of derived traits (`Serialize`, `Getters`, etc.). In /// In custom derive this is the name of the derived trait (`Serialize`, `Getters`, etc.).
/// attribute-like and functiona-like macros - single name of macro itself (`show_streams`). /// In attribute-like and function-like macros - single name of macro itself (`show_streams`).
pub macro_name: String, pub macro_name: String,
/// Possible attributes for the attribute-like macros. /// Possible attributes for the attribute-like macros.

View File

@@ -2,55 +2,43 @@
use crate::{expand_task, list_macros}; use crate::{expand_task, list_macros};
use ra_proc_macro::msg::{self, Message}; use ra_proc_macro::msg::{self, Message};
use std::io; use std::io;
fn read_request() -> Result<Option<msg::Request>, io::Error> {
let stdin = io::stdin();
let mut stdin = stdin.lock();
msg::Request::read(&mut stdin)
}
fn write_response(res: Result<msg::Response, String>) -> Result<(), io::Error> {
let msg: msg::Response = match res {
Ok(res) => res,
Err(err) => msg::Response::Error(msg::ResponseError {
code: msg::ErrorCode::ExpansionError,
message: err,
}),
};
let stdout = io::stdout();
let mut stdout = stdout.lock();
msg.write(&mut stdout)
}
pub fn run() { pub fn run() {
loop { loop {
let req = match read_request() { let req = match read_request() {
Err(err) => { Err(err) => {
eprintln!("Read message error on ra_proc_macro_srv: {}", err.to_string()); eprintln!("Read message error on ra_proc_macro_srv: {}", err);
continue; continue;
} }
Ok(None) => continue, Ok(None) => continue,
Ok(Some(req)) => req, Ok(Some(req)) => req,
}; };
match req { let res = match req {
msg::Request::ListMacro(task) => { msg::Request::ListMacro(task) => Ok(msg::Response::ListMacro(list_macros(&task))),
if let Err(err) =
write_response(list_macros(&task).map(|it| msg::Response::ListMacro(it)))
{
eprintln!("Write message error on list macro: {}", err);
}
}
msg::Request::ExpansionMacro(task) => { msg::Request::ExpansionMacro(task) => {
if let Err(err) = expand_task(&task).map(msg::Response::ExpansionMacro)
write_response(expand_task(&task).map(|it| msg::Response::ExpansionMacro(it)))
{
eprintln!("Write message error on expansion macro: {}", err);
}
} }
};
let msg = res.unwrap_or_else(|err| {
msg::Response::Error(msg::ResponseError {
code: msg::ErrorCode::ExpansionError,
message: err,
})
});
if let Err(err) = write_response(msg) {
eprintln!("Write message error: {}", err);
} }
} }
} }
fn read_request() -> io::Result<Option<msg::Request>> {
msg::Request::read(&mut io::stdin().lock())
}
fn write_response(msg: msg::Response) -> io::Result<()> {
msg.write(&mut io::stdout().lock())
}

View File

@@ -9,43 +9,37 @@ use libloading::Library;
use memmap::Mmap; use memmap::Mmap;
use ra_proc_macro::ProcMacroKind; use ra_proc_macro::ProcMacroKind;
use std::io::Error as IoError; use std::io;
use std::io::ErrorKind as IoErrorKind;
const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> IoError { fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
IoError::new(IoErrorKind::InvalidData, e) io::Error::new(io::ErrorKind::InvalidData, e)
} }
fn is_derive_registrar_symbol(symbol: &str) -> bool { fn is_derive_registrar_symbol(symbol: &str) -> bool {
symbol.contains(NEW_REGISTRAR_SYMBOL) symbol.contains(NEW_REGISTRAR_SYMBOL)
} }
fn find_registrar_symbol(file: &Path) -> Result<Option<String>, IoError> { fn find_registrar_symbol(file: &Path) -> io::Result<Option<String>> {
let file = File::open(file)?; let file = File::open(file)?;
let buffer = unsafe { Mmap::map(&file)? }; let buffer = unsafe { Mmap::map(&file)? };
let object = Object::parse(&buffer).map_err(invalid_data_err)?; let object = Object::parse(&buffer).map_err(invalid_data_err)?;
match object { let name = match object {
Object::Elf(elf) => { Object::Elf(elf) => {
let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?; let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?;
let name = symbols.into_iter().find(|s| is_derive_registrar_symbol(s)).map(&str::to_owned)
symbols.iter().find(|s| is_derive_registrar_symbol(s)).map(|s| s.to_string());
Ok(name)
} }
Object::PE(pe) => { Object::PE(pe) => pe
let name = pe
.exports .exports
.iter() .iter()
.flat_map(|s| s.name) .flat_map(|s| s.name)
.find(|s| is_derive_registrar_symbol(s)) .find(|s| is_derive_registrar_symbol(s))
.map(|s| s.to_string()); .map(&str::to_owned),
Ok(name)
}
Object::Mach(Mach::Binary(binary)) => { Object::Mach(Mach::Binary(binary)) => {
let exports = binary.exports().map_err(invalid_data_err)?; let exports = binary.exports().map_err(invalid_data_err)?;
let name = exports exports
.iter() .iter()
.map(|s| { .map(|s| {
// In macos doc: // In macos doc:
@@ -59,11 +53,11 @@ fn find_registrar_symbol(file: &Path) -> Result<Option<String>, IoError> {
} }
}) })
.find(|s| is_derive_registrar_symbol(s)) .find(|s| is_derive_registrar_symbol(s))
.map(|s| s.to_string()); .map(&str::to_owned)
Ok(name)
}
_ => Ok(None),
} }
_ => return Ok(None),
};
return Ok(name);
} }
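The predicate the scan relies on, in isolation: registrar symbol names come out mangled, so the code matches by substring rather than by equality. The mangled name below is made up for illustration.

const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";

fn is_derive_registrar_symbol(symbol: &str) -> bool {
    symbol.contains(NEW_REGISTRAR_SYMBOL)
}

fn main() {
    // hypothetical mangled export from a proc-macro dylib
    assert!(is_derive_registrar_symbol(
        "_ZN9my_macros24_rustc_proc_macro_decls_17h0123456789abcdefE"
    ));
    assert!(!is_derive_registrar_symbol("main"));
}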
/// Loads dynamic library in platform dependent manner. /// Loads dynamic library in platform dependent manner.
@@ -93,15 +87,16 @@ fn load_library(file: &Path) -> Result<Library, libloading::Error> {
} }
struct ProcMacroLibraryLibloading { struct ProcMacroLibraryLibloading {
// Hold the dylib to prevent it for unloadeding // Hold the dylib to prevent it from unloading
_lib: Library, _lib: Library,
exported_macros: Vec<bridge::client::ProcMacro>, exported_macros: Vec<bridge::client::ProcMacro>,
} }
impl ProcMacroLibraryLibloading { impl ProcMacroLibraryLibloading {
fn open(file: &Path) -> Result<Self, IoError> { fn open(file: &Path) -> io::Result<Self> {
let symbol_name = find_registrar_symbol(file)? let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| {
.ok_or(invalid_data_err(format!("Cannot find registrar symbol in file {:?}", file)))?; invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display()))
})?;
let lib = load_library(file).map_err(invalid_data_err)?; let lib = load_library(file).map_err(invalid_data_err)?;
let exported_macros = { let exported_macros = {
@@ -121,18 +116,16 @@ pub struct Expander {
} }
impl Expander { impl Expander {
pub fn new<P: AsRef<Path>>(lib: &P) -> Result<Expander, String> { pub fn new(lib: &Path) -> Result<Expander, String> {
let mut libs = vec![]; // Some libraries for dynamic loading require canonicalized path even when it is
/* Some libraries for dynamic loading require canonicalized path (even when it is // already absolute
already absolute let lib = lib
*/ .canonicalize()
let lib = .unwrap_or_else(|err| panic!("Cannot canonicalize {}: {:?}", lib.display(), err));
lib.as_ref().canonicalize().expect(&format!("Cannot canonicalize {:?}", lib.as_ref()));
let library = ProcMacroLibraryImpl::open(&lib).map_err(|e| e.to_string())?; let library = ProcMacroLibraryImpl::open(&lib).map_err(|e| e.to_string())?;
libs.push(library);
Ok(Expander { libs }) Ok(Expander { libs: vec![library] })
} }
pub fn expand( pub fn expand(
@@ -176,7 +169,6 @@ impl Expander {
parsed_attributes, parsed_attributes,
parsed_body, parsed_body,
); );
return res.map(|it| it.subtree); return res.map(|it| it.subtree);
} }
_ => continue, _ => continue,
@@ -187,12 +179,11 @@ impl Expander {
Err(bridge::PanicMessage::String("Nothing to expand".to_string())) Err(bridge::PanicMessage::String("Nothing to expand".to_string()))
} }
pub fn list_macros(&self) -> Result<Vec<(String, ProcMacroKind)>, bridge::PanicMessage> { pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
let mut result = vec![]; self.libs
.iter()
for lib in &self.libs { .flat_map(|it| &it.exported_macros)
for proc_macro in &lib.exported_macros { .map(|proc_macro| match proc_macro {
let res = match proc_macro {
bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
(trait_name.to_string(), ProcMacroKind::CustomDerive) (trait_name.to_string(), ProcMacroKind::CustomDerive)
} }
@@ -202,11 +193,7 @@ impl Expander {
bridge::client::ProcMacro::Attr { name, .. } => { bridge::client::ProcMacro::Attr { name, .. } => {
(name.to_string(), ProcMacroKind::Attr) (name.to_string(), ProcMacroKind::Attr)
} }
}; })
result.push(res); .collect()
}
}
Ok(result)
} }
} }

View File

@@ -3,10 +3,10 @@
//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code. //! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code.
//! The general idea here is based on https://github.com/fedochet/rust-proc-macro-expander. //! The general idea here is based on https://github.com/fedochet/rust-proc-macro-expander.
//! //!
//! But we change some several design for fitting RA needs: //! But we adapt it to better fit RA needs:
//! //!
//! * We use `ra_tt` for proc-macro `TokenStream` server, it is easy to manipute and interact with //! * We use `ra_tt` for proc-macro `TokenStream` server, it is easier to manipulate and interact with
//! RA then proc-macro2 token stream. //! RA than `proc-macro2` token stream.
//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue) //! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)
@@ -21,36 +21,28 @@ mod dylib;
use proc_macro::bridge::client::TokenStream; use proc_macro::bridge::client::TokenStream;
use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask};
use std::path::Path;
pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> { pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> {
let expander = dylib::Expander::new(&task.lib) let expander = create_expander(&task.lib);
.expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib));
match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) {
Ok(expansion) => Ok(ExpansionResult { expansion }), Ok(expansion) => Ok(ExpansionResult { expansion }),
Err(msg) => { Err(msg) => {
let reason = format!( Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg))
"Cannot perform expansion for {}: error {:?}!",
&task.macro_name,
msg.as_str()
);
Err(reason)
} }
} }
} }
pub(crate) fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> { pub(crate) fn list_macros(task: &ListMacrosTask) -> ListMacrosResult {
let expander = dylib::Expander::new(&task.lib) let expander = create_expander(&task.lib);
.expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib));
match expander.list_macros() { ListMacrosResult { macros: expander.list_macros() }
Ok(macros) => Ok(ListMacrosResult { macros }), }
Err(msg) => {
let reason = fn create_expander(lib: &Path) -> dylib::Expander {
format!("Cannot perform expansion for {:?}: error {:?}!", &task.lib, msg.as_str()); dylib::Expander::new(lib)
Err(reason) .unwrap_or_else(|err| panic!("Cannot create expander for {}: {:?}", lib.display(), err))
}
}
} }
pub mod cli; pub mod cli;

View File

@@ -6,7 +6,7 @@
//! The original idea from fedochet is using proc-macro2 as backend, //! The original idea from fedochet is using proc-macro2 as backend,
//! we use ra_tt instead for better integration with RA. we use ra_tt instead for better integration with RA.
//! //!
//! FIXME: No span and source file informatin is implemented yet //! FIXME: No span and source file information is implemented yet
use crate::proc_macro::bridge::{self, server}; use crate::proc_macro::bridge::{self, server};
use ra_tt as tt; use ra_tt as tt;
@@ -76,7 +76,16 @@ impl Extend<TokenTree> for TokenStream {
impl Extend<TokenStream> for TokenStream { impl Extend<TokenStream> for TokenStream {
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) { fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
for item in streams { for item in streams {
self.subtree.token_trees.extend(&mut item.into_iter()) for tkn in item {
match tkn {
tt::TokenTree::Subtree(subtree) if subtree.delimiter.is_none() => {
self.subtree.token_trees.extend(subtree.token_trees);
}
_ => {
self.subtree.token_trees.push(tkn);
}
}
}
} }
} }
} }
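The flattening rule above, modeled on a standalone type (not `ra_tt`'s `TokenTree`): appending a delimiter-less subtree splices its children into the stream instead of nesting them.

#[derive(Debug, Clone, PartialEq)]
enum Tree {
    Leaf(&'static str),
    Subtree { delimiter: Option<char>, trees: Vec<Tree> },
}

fn extend_flattening(out: &mut Vec<Tree>, items: impl IntoIterator<Item = Tree>) {
    for tkn in items {
        match tkn {
            // no delimiter: splice the children in directly
            Tree::Subtree { delimiter: None, trees } => out.extend(trees),
            other => out.push(other),
        }
    }
}

fn main() {
    let mut stream = Vec::new();
    extend_flattening(
        &mut stream,
        vec![Tree::Subtree { delimiter: None, trees: vec![Tree::Leaf("a"), Tree::Leaf("b")] }],
    );
    assert_eq!(stream, vec![Tree::Leaf("a"), Tree::Leaf("b")]);
}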

View File

@@ -25,7 +25,6 @@ SUBTREE $
SUBTREE () 4294967295 SUBTREE () 4294967295
IDENT feature 4294967295 IDENT feature 4294967295
PUNCH = [alone] 4294967295 PUNCH = [alone] 4294967295
SUBTREE $
LITERAL "cargo-clippy" 0 LITERAL "cargo-clippy" 0
PUNCH , [alone] 4294967295 PUNCH , [alone] 4294967295
IDENT allow 4294967295 IDENT allow 4294967295

View File

@@ -60,6 +60,6 @@ pub fn list(crate_name: &str, version: &str) -> Vec<String> {
let path = fixtures::dylib_path(crate_name, version); let path = fixtures::dylib_path(crate_name, version);
let task = ListMacrosTask { lib: path }; let task = ListMacrosTask { lib: path };
let res = list_macros(&task).unwrap(); let res = list_macros(&task);
res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
} }

View File

@@ -303,8 +303,7 @@ pub fn load_extern_resources(
if message.target.kind.contains(&"proc-macro".to_string()) { if message.target.kind.contains(&"proc-macro".to_string()) {
let package_id = message.package_id; let package_id = message.package_id;
// Skip rmeta file // Skip rmeta file
if let Some(filename) = if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name))
message.filenames.iter().filter(|name| is_dylib(name)).next()
{ {
res.proc_dylib_paths.insert(package_id, filename.clone()); res.proc_dylib_paths.insert(package_id, filename.clone());
} }

View File

@@ -10,8 +10,8 @@ use ra_text_edit::TextEditBuilder;
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
use crate::{ use crate::{
AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr,
TextRange, TextUnit, SyntaxToken, TextRange, TextUnit,
}; };
/// Returns ancestors of the node at the offset, sorted by length. This should /// Returns ancestors of the node at the offset, sorted by length. This should
@@ -90,6 +90,10 @@ pub fn neighbor<T: AstNode>(me: &T, direction: Direction) -> Option<T> {
me.syntax().siblings(direction).skip(1).find_map(T::cast) me.syntax().siblings(direction).skip(1).find_map(T::cast)
} }
pub fn has_errors(node: &SyntaxNode) -> bool {
node.children().any(|it| it.kind() == SyntaxKind::ERROR)
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)] #[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum InsertPosition<T> { pub enum InsertPosition<T> {
First, First,

View File

@@ -307,7 +307,11 @@ impl ast::UseTree {
fn split_path_prefix(prefix: &ast::Path) -> Option<ast::Path> { fn split_path_prefix(prefix: &ast::Path) -> Option<ast::Path> {
let parent = prefix.parent_path()?; let parent = prefix.parent_path()?;
let mut res = make::path_unqualified(parent.segment()?); let segment = parent.segment()?;
if algo::has_errors(segment.syntax()) {
return None;
}
let mut res = make::path_unqualified(segment);
for p in iter::successors(parent.parent_path(), |it| it.parent_path()) { for p in iter::successors(parent.parent_path(), |it| it.parent_path()) {
res = make::path_qualified(res, p.segment()?); res = make::path_qualified(res, p.segment()?);
} }
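The guard pattern in miniature, with illustrative local types rather than `ra_syntax` nodes: refuse to run the transformation when the node about to be reused contains parser error children.

#[derive(PartialEq)]
enum Kind {
    Ident,
    Error,
}

struct Node {
    kind: Kind,
    children: Vec<Node>,
}

fn has_errors(node: &Node) -> bool {
    node.children.iter().any(|it| it.kind == Kind::Error)
}

fn split_prefix(segment: &Node) -> Option<&Node> {
    if has_errors(segment) {
        return None; // refuse to transform malformed syntax
    }
    Some(segment)
}

fn main() {
    let ok = Node { kind: Kind::Ident, children: vec![] };
    assert!(split_prefix(&ok).is_some());
    let broken = Node {
        kind: Kind::Ident,
        children: vec![Node { kind: Kind::Error, children: vec![] }],
    };
    assert!(split_prefix(&broken).is_none());
}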

View File

@@ -84,7 +84,7 @@ impl Args {
if matches.contains(["-h", "--help"]) { if matches.contains(["-h", "--help"]) {
eprintln!( eprintln!(
"\ "\
ra-cli-parse rust-analyzer parse
USAGE: USAGE:
rust-analyzer parse [FLAGS] rust-analyzer parse [FLAGS]
@@ -104,7 +104,7 @@ FLAGS:
if matches.contains(["-h", "--help"]) { if matches.contains(["-h", "--help"]) {
eprintln!( eprintln!(
"\ "\
ra-cli-symbols rust-analyzer symbols
USAGE: USAGE:
rust-analyzer highlight [FLAGS] rust-analyzer highlight [FLAGS]
@@ -123,7 +123,7 @@ FLAGS:
if matches.contains(["-h", "--help"]) { if matches.contains(["-h", "--help"]) {
eprintln!( eprintln!(
"\ "\
ra-cli-highlight rust-analyzer highlight
USAGE: USAGE:
rust-analyzer highlight [FLAGS] rust-analyzer highlight [FLAGS]
@@ -143,7 +143,7 @@ FLAGS:
if matches.contains(["-h", "--help"]) { if matches.contains(["-h", "--help"]) {
eprintln!( eprintln!(
"\ "\
ra-cli-analysis-stats rust-analyzer analysis-stats
USAGE: USAGE:
rust-analyzer analysis-stats [FLAGS] [OPTIONS] [PATH] rust-analyzer analysis-stats [FLAGS] [OPTIONS] [PATH]
@@ -193,7 +193,7 @@ ARGS:
if matches.contains(["-h", "--help"]) { if matches.contains(["-h", "--help"]) {
eprintln!( eprintln!(
"\ "\
rust-analyzer-analysis-bench rust-analyzer analysis-bench
USAGE: USAGE:
rust-analyzer analysis-bench [FLAGS] [OPTIONS] rust-analyzer analysis-bench [FLAGS] [OPTIONS]
@@ -236,7 +236,7 @@ ARGS:
if matches.contains(["-h", "--help"]) { if matches.contains(["-h", "--help"]) {
eprintln!( eprintln!(
"\ "\
ra-cli-diagnostics rust-analyzer diagnostics
USAGE: USAGE:
rust-analyzer diagnostics [FLAGS] [PATH] rust-analyzer diagnostics [FLAGS] [PATH]
@@ -269,7 +269,7 @@ ARGS:
_ => { _ => {
eprintln!( eprintln!(
"\ "\
ra-cli rust-analyzer
USAGE: USAGE:
rust-analyzer <SUBCOMMAND> rust-analyzer <SUBCOMMAND>
@@ -281,6 +281,8 @@ SUBCOMMANDS:
analysis-bench analysis-bench
analysis-stats analysis-stats
highlight highlight
diagnostics
proc-macro
parse parse
symbols" symbols"
); );

View File

@@ -51,7 +51,7 @@ fn main() -> Result<()> {
cli::diagnostics(path.as_ref(), load_output_dirs, with_proc_macro, all)? cli::diagnostics(path.as_ref(), load_output_dirs, with_proc_macro, all)?
} }
args::Command::ProcMacro => run_proc_macro_sv()?, args::Command::ProcMacro => run_proc_macro_srv()?,
args::Command::RunServer => run_server()?, args::Command::RunServer => run_server()?,
args::Command::Version => println!("rust-analyzer {}", env!("REV")), args::Command::Version => println!("rust-analyzer {}", env!("REV")),
} }
@@ -65,7 +65,7 @@ fn setup_logging() -> Result<()> {
Ok(()) Ok(())
} }
fn run_proc_macro_sv() -> Result<()> { fn run_proc_macro_srv() -> Result<()> {
ra_proc_macro_srv::cli::run(); ra_proc_macro_srv::cli::run();
Ok(()) Ok(())
} }

View File

@@ -76,7 +76,7 @@ pub(crate) fn load_cargo(
ProcMacroClient::dummy() ProcMacroClient::dummy()
} else { } else {
let path = std::env::current_exe()?; let path = std::env::current_exe()?;
ProcMacroClient::extern_process(&path, &["proc-macro"]).unwrap() ProcMacroClient::extern_process(path, &["proc-macro"]).unwrap()
}; };
let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client); let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client);
Ok((host, source_roots)) Ok((host, source_roots))

View File

@@ -24,7 +24,9 @@ use crate::{
world::WorldSnapshot, world::WorldSnapshot,
Result, Result,
}; };
use semantic_tokens::{ATTRIBUTE, BUILTIN_TYPE, ENUM_MEMBER, LIFETIME, TYPE_ALIAS, UNION}; use semantic_tokens::{
ATTRIBUTE, BUILTIN_TYPE, ENUM_MEMBER, LIFETIME, TYPE_ALIAS, UNION, UNRESOLVED_REFERENCE,
};
pub trait Conv { pub trait Conv {
type Output; type Output;
@@ -381,6 +383,7 @@ impl Conv for Highlight {
HighlightTag::Comment => SemanticTokenType::COMMENT, HighlightTag::Comment => SemanticTokenType::COMMENT,
HighlightTag::Attribute => ATTRIBUTE, HighlightTag::Attribute => ATTRIBUTE,
HighlightTag::Keyword => SemanticTokenType::KEYWORD, HighlightTag::Keyword => SemanticTokenType::KEYWORD,
HighlightTag::UnresolvedReference => UNRESOLVED_REFERENCE,
}; };
for modifier in self.modifiers.iter() { for modifier in self.modifiers.iter() {

View File

@@ -10,6 +10,8 @@ pub(crate) const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMe
pub(crate) const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime"); pub(crate) const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime");
pub(crate) const TYPE_ALIAS: SemanticTokenType = SemanticTokenType::new("typeAlias"); pub(crate) const TYPE_ALIAS: SemanticTokenType = SemanticTokenType::new("typeAlias");
pub(crate) const UNION: SemanticTokenType = SemanticTokenType::new("union"); pub(crate) const UNION: SemanticTokenType = SemanticTokenType::new("union");
pub(crate) const UNRESOLVED_REFERENCE: SemanticTokenType =
SemanticTokenType::new("unresolvedReference");
pub(crate) const CONSTANT: SemanticTokenModifier = SemanticTokenModifier::new("constant"); pub(crate) const CONSTANT: SemanticTokenModifier = SemanticTokenModifier::new("constant");
pub(crate) const CONTROL_FLOW: SemanticTokenModifier = SemanticTokenModifier::new("controlFlow"); pub(crate) const CONTROL_FLOW: SemanticTokenModifier = SemanticTokenModifier::new("controlFlow");
@@ -43,6 +45,7 @@ pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
LIFETIME, LIFETIME,
TYPE_ALIAS, TYPE_ALIAS,
UNION, UNION,
UNRESOLVED_REFERENCE,
]; ];
pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[ pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[

View File

@@ -148,20 +148,17 @@ impl WorldState {
let proc_macro_client = match &config.proc_macro_srv { let proc_macro_client = match &config.proc_macro_srv {
None => ProcMacroClient::dummy(), None => ProcMacroClient::dummy(),
Some((path, args)) => { Some((path, args)) => match ProcMacroClient::extern_process(path.into(), args) {
let path = std::path::Path::new(path);
match ProcMacroClient::extern_process(path, args) {
Ok(it) => it, Ok(it) => it,
Err(err) => { Err(err) => {
log::error!( log::error!(
"Fail to run ra_proc_macro_srv from path {}, error : {}", "Fail to run ra_proc_macro_srv from path {}, error: {:?}",
path.to_string_lossy(), path,
err err
); );
ProcMacroClient::dummy() ProcMacroClient::dummy()
} }
} },
}
}; };
workspaces workspaces
@@ -184,7 +181,7 @@ impl WorldState {
let mut analysis_host = AnalysisHost::new(lru_capacity); let mut analysis_host = AnalysisHost::new(lru_capacity);
analysis_host.apply_change(change); analysis_host.apply_change(change);
WorldState { WorldState {
config: config, config,
roots: folder_roots, roots: folder_roots,
workspaces: Arc::new(workspaces), workspaces: Arc::new(workspaces),
analysis_host, analysis_host,

View File

@@ -14,9 +14,9 @@
// Master copy of this document lives in the https://github.com/rust-analyzer/rust-analyzer repository // Master copy of this document lives in the https://github.com/rust-analyzer/rust-analyzer repository
At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time. At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
This manual focuses on a specific usage of the library -- the implementation of This manual focuses on a specific usage of the library -- running it as part of a server that implements the
https://microsoft.github.io/language-server-protocol/[Language Server Protocol]. https://microsoft.github.io/language-server-protocol/[Language Server Protocol] (LSP).
LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process. The LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
To improve this document, send a pull request against To improve this document, send a pull request against
https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc[this file]. https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc[this file].
@@ -26,7 +26,7 @@ https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc
In theory, one should be able to just install the server binary and have it automatically work with any editor. In theory, one should be able to just install the server binary and have it automatically work with any editor.
We are not there yet, so some editor specific setup is required. We are not there yet, so some editor specific setup is required.
Additionally, rust-analyzer needs sources of the standard library. Additionally, rust-analyzer needs the sources of the standard library.
If the source code is not present, rust-analyzer will attempt to install it automatically. If the source code is not present, rust-analyzer will attempt to install it automatically.
To add the sources manually, run the following command: To add the sources manually, run the following command:
@@ -38,7 +38,7 @@ $ rustup component add rust-src
=== VS Code === VS Code
This is the best supported editor at the moment. This is the best supported editor at the moment.
rust-analyzer plugin for VS Code is maintained The rust-analyzer plugin for VS Code is maintained
https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree]. https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree].
You can install the latest release of the plugin from You can install the latest release of the plugin from
@@ -74,7 +74,7 @@ We ship nightly releases for VS Code. To help us out with testing the newest cod
{ "rust-analyzer.updates.channel": "nightly" } { "rust-analyzer.updates.channel": "nightly" }
---- ----
You will be prompted to install the `nightly` extension version. Just click `Download now` and from that moment you will get automatic updates each 24 hours. You will be prompted to install the `nightly` extension version. Just click `Download now` and from that moment you will get automatic updates every 24 hours.
If you don't want to be asked for `Download now` every day when the new nightly version is released add the following to your `settings.json`: If you don't want to be asked for `Download now` every day when the new nightly version is released add the following to your `settings.json`:
[source,json] [source,json]
@@ -110,19 +110,21 @@ Here are some useful self-diagnostic commands:
=== Language Server Binary === Language Server Binary
Other editors generally require `rust-analyzer` binary to be in `$PATH`. Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
You can download the pre-built binary from You can download the pre-built binary from the https://github.com/rust-analyzer/rust-analyzer/releases[releases] page. Typically, you then need to rename the binary for your platform, e.g. `rust-analyzer-mac` if you're on Mac OS, to `rust-analyzer` and make it executable in addition to moving it into a directory in your `$PATH`.
https://github.com/rust-analyzer/rust-analyzer/releases[releases]
page, or you can install it from source using the following command: Alternatively, you can install it from source using the following command:
[source,bash] [source,bash]
---- ----
$ cargo xtask install --server $ cargo xtask install --server
---- ----
If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-analyzer/rust-analyzer/issues/1811[this issue]. On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
==== Arch Linux ==== Arch Linux
`rust-analyzer` binary can be installed from AUR (Arch User Repository): The `rust-analyzer` binary can be installed from AUR (Arch User Repository):
- https://aur.archlinux.org/packages/rust-analyzer-bin[`rust-analyzer-bin`] (binary from GitHub releases) - https://aur.archlinux.org/packages/rust-analyzer-bin[`rust-analyzer-bin`] (binary from GitHub releases)
- https://aur.archlinux.org/packages/rust-analyzer[`rust-analyzer`] (built from latest tagged source) - https://aur.archlinux.org/packages/rust-analyzer[`rust-analyzer`] (built from latest tagged source)
@@ -156,8 +158,8 @@ There are several LSP client implementations for vim:
2. Run `:CocInstall coc-rust-analyzer` to install 2. Run `:CocInstall coc-rust-analyzer` to install
https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer], https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
this extension implements _most_ of the features supported in the VSCode extension: this extension implements _most_ of the features supported in the VSCode extension:
* same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.enableCargoWatchOnStartup` etc. * same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.cargo.features` etc.
* same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch` etc. * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
* highlighting and inlay_hints are not implemented yet * highlighting and inlay_hints are not implemented yet
==== LanguageClient-neovim ==== LanguageClient-neovim
@@ -183,11 +185,20 @@ Once `neovim/nvim-lsp` is installed, use `+lua require'nvim_lsp'.rust_analyzer.s
=== Sublime Text 3 === Sublime Text 3
Prerequisites: Prerequisites: You have installed the <<language-server-binary,`rust-analyzer` binary>>.
`LSP` package. You also need the `LSP` package. To install it:
Invoke the command palette (`ctrl+shift+p`) and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer) 1. If you've never installed a Sublime Text package, install Package Control:
* Open the command palette (Win/Linux: `ctrl+shift+p`, Mac: `cmd+shift+p`)
* Type `Install Package Control`, press enter
2. In the command palette, run `Package control: Install package`, and in the list that pops up, type `LSP` and press enter.
Finally, with your Rust project open, in the command palette, run `LSP: Enable Language Server In Project` or `LSP: Enable Language Server Globally`, then select `rust-analyzer` in the list that pops up to enable the rust-analyzer LSP. The latter means that rust-analyzer is enabled by default in Rust projects.
If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the bottom bar, and after waiting a bit, functionality like tooltips on hovering over variables should become available.
If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<language-server-binary,section on installing the language server binary>>.
== Usage == Usage

View File

@@ -517,6 +517,10 @@
"id": "union", "id": "union",
"description": "Style for C-style untagged unions", "description": "Style for C-style untagged unions",
"superType": "type" "superType": "type"
},
{
"id": "unresolvedReference",
"description": "Style for names which can not be resolved due to compilation errors"
} }
], ],
"semanticTokenModifiers": [ "semanticTokenModifiers": [

View File

@@ -9,9 +9,9 @@ mod gen_syntax;
mod gen_parser_tests; mod gen_parser_tests;
mod gen_assists_docs; mod gen_assists_docs;
use std::{fs, mem, path::Path}; use std::{mem, path::Path};
use crate::Result; use crate::{not_bash::fs2, Result};
pub use self::{ pub use self::{
gen_assists_docs::generate_assists_docs, gen_parser_tests::generate_parser_tests, gen_assists_docs::generate_assists_docs, gen_parser_tests::generate_parser_tests,
@@ -39,7 +39,7 @@ pub enum Mode {
/// A helper to update file on disk if it has changed. /// A helper to update file on disk if it has changed.
/// With verify = false, /// With verify = false,
fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> { fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> {
match fs::read_to_string(path) { match fs2::read_to_string(path) {
Ok(ref old_contents) if normalize(old_contents) == normalize(contents) => { Ok(ref old_contents) if normalize(old_contents) == normalize(contents) => {
return Ok(()); return Ok(());
} }
@@ -49,7 +49,7 @@ fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> {
anyhow::bail!("`{}` is not up-to-date", path.display()); anyhow::bail!("`{}` is not up-to-date", path.display());
} }
eprintln!("updating {}", path.display()); eprintln!("updating {}", path.display());
fs::write(path, contents)?; fs2::write(path, contents)?;
return Ok(()); return Ok(());
fn normalize(s: &str) -> String { fn normalize(s: &str) -> String {
@@ -65,7 +65,7 @@ fn extract_comment_blocks_with_empty_lines(text: &str) -> Vec<Vec<String>> {
do_extract_comment_blocks(text, true) do_extract_comment_blocks(text, true)
} }
fn do_extract_comment_blocks(text: &str, allow_blocks_with_empty_lins: bool) -> Vec<Vec<String>> { fn do_extract_comment_blocks(text: &str, allow_blocks_with_empty_lines: bool) -> Vec<Vec<String>> {
let mut res = Vec::new(); let mut res = Vec::new();
let prefix = "// "; let prefix = "// ";
@@ -73,7 +73,7 @@ fn do_extract_comment_blocks(text: &str, allow_blocks_with_empty_lins: bool) ->
let mut block = vec![]; let mut block = vec![];
for line in lines { for line in lines {
if line == "//" && allow_blocks_with_empty_lins { if line == "//" && allow_blocks_with_empty_lines {
block.push(String::new()); block.push(String::new());
continue; continue;
} }