Auto merge of #141954 - matthiaskrgr:rollup-zptd6t9, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - rust-lang/rust#141554 (Improve documentation for codegen options)
 - rust-lang/rust#141817 (rustc_llvm: add Windows system libs only when cross-compiling from Wi…)
 - rust-lang/rust#141843 (Add `visit_id` to ast `Visitor`)
 - rust-lang/rust#141881 (Subtree update of `rust-analyzer`)
 - rust-lang/rust#141898 ([rustdoc-json] Implement PartialOrd and Ord for rustdoc_types::Id)
 - rust-lang/rust#141921 (Disable f64 minimum/maximum tests for arm 32)
 - rust-lang/rust#141930 (Enable triagebot `[concern]` functionality)
 - rust-lang/rust#141936 (Decouple "reporting in deps" from `FutureIncompatibilityReason`)
 - rust-lang/rust#141949 (move `test-float-parse` tool into `src/tools` dir)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
bors
2025-06-03 09:51:59 +00:00
73 changed files with 1009 additions and 725 deletions

View File

@@ -4,7 +4,6 @@ members = [
# tidy-alphabetical-start
"compiler/rustc",
"src/build_helper",
"src/etc/test-float-parse",
"src/rustc-std-workspace/rustc-std-workspace-alloc",
"src/rustc-std-workspace/rustc-std-workspace-core",
"src/rustc-std-workspace/rustc-std-workspace-std",
@@ -41,6 +40,7 @@ members = [
"src/tools/rustdoc-themes",
"src/tools/rustfmt",
"src/tools/suggest-tests",
"src/tools/test-float-parse",
"src/tools/tidy",
"src/tools/tier-check",
"src/tools/unicode-table-generator",

View File

@@ -12,7 +12,6 @@ use std::panic;
use rustc_data_structures::flat_map_in_place::FlatMapInPlace;
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_span::source_map::Spanned;
use rustc_span::{Ident, Span};
use smallvec::{Array, SmallVec, smallvec};
use thin_vec::ThinVec;
@@ -499,58 +498,6 @@ fn walk_assoc_item_constraint<T: MutVisitor>(
vis.visit_span(span);
}
/// Mutably walks a type node: visits the node id first, then every nested
/// AST node inside `kind`, and finally the type's span.
pub fn walk_ty<T: MutVisitor>(vis: &mut T, ty: &mut Ty) {
    // Exhaustive destructuring so adding a field to `Ty` forces an update here.
    let Ty { id, kind, span, tokens: _ } = ty;
    vis.visit_id(id);
    match kind {
        // Error and nullary kinds carry no nested AST nodes.
        TyKind::Err(_guar) => {}
        TyKind::Infer | TyKind::ImplicitSelf | TyKind::Dummy | TyKind::Never | TyKind::CVarArgs => {
        }
        TyKind::Slice(ty) => vis.visit_ty(ty),
        TyKind::Ptr(MutTy { ty, mutbl: _ }) => vis.visit_ty(ty),
        TyKind::Ref(lt, MutTy { ty, mutbl: _ }) | TyKind::PinnedRef(lt, MutTy { ty, mutbl: _ }) => {
            // Lifetime is optional on references; visit it before the referent.
            visit_opt(lt, |lt| vis.visit_lifetime(lt));
            vis.visit_ty(ty);
        }
        TyKind::BareFn(bft) => {
            let BareFnTy { safety, ext: _, generic_params, decl, decl_span } = bft.deref_mut();
            visit_safety(vis, safety);
            // Generic params may be added/removed by the visitor, hence flat_map.
            generic_params.flat_map_in_place(|param| vis.flat_map_generic_param(param));
            vis.visit_fn_decl(decl);
            vis.visit_span(decl_span);
        }
        TyKind::UnsafeBinder(binder) => {
            let UnsafeBinderTy { generic_params, inner_ty } = binder.deref_mut();
            generic_params.flat_map_in_place(|param| vis.flat_map_generic_param(param));
            vis.visit_ty(inner_ty);
        }
        TyKind::Tup(tys) => visit_thin_vec(tys, |ty| vis.visit_ty(ty)),
        TyKind::Paren(ty) => vis.visit_ty(ty),
        TyKind::Pat(ty, pat) => {
            vis.visit_ty(ty);
            vis.visit_ty_pat(pat);
        }
        TyKind::Path(qself, path) => {
            // Qualified-self (`<T as Trait>::...`) is visited before the path.
            vis.visit_qself(qself);
            vis.visit_path(path);
        }
        TyKind::Array(ty, length) => {
            vis.visit_ty(ty);
            vis.visit_anon_const(length);
        }
        TyKind::Typeof(expr) => vis.visit_anon_const(expr),
        TyKind::TraitObject(bounds, _syntax) => {
            visit_vec(bounds, |bound| vis.visit_param_bound(bound, BoundKind::TraitObject))
        }
        TyKind::ImplTrait(id, bounds) => {
            // `impl Trait` carries its own node id in addition to the `Ty`'s id.
            vis.visit_id(id);
            visit_vec(bounds, |bound| vis.visit_param_bound(bound, BoundKind::Impl));
        }
        TyKind::MacCall(mac) => vis.visit_mac_call(mac),
    }
    // Span is always visited last, after all nested nodes.
    vis.visit_span(span);
}
pub fn walk_ty_pat<T: MutVisitor>(vis: &mut T, ty: &mut TyPat) {
let TyPat { id, kind, span, tokens: _ } = ty;
vis.visit_id(id);
@@ -588,13 +535,6 @@ fn walk_ident<T: MutVisitor>(vis: &mut T, Ident { name: _, span }: &mut Ident) {
vis.visit_span(span);
}
/// Mutably walks one path segment: node id, then identifier, then any
/// generic arguments (`::<...>` / `(...)`).
fn walk_path_segment<T: MutVisitor>(vis: &mut T, segment: &mut PathSegment) {
    let PathSegment { ident, id, args } = segment;
    vis.visit_id(id);
    vis.visit_ident(ident);
    visit_opt(args, |args| vis.visit_generic_args(args));
}
fn walk_path<T: MutVisitor>(vis: &mut T, Path { segments, span, tokens: _ }: &mut Path) {
for segment in segments {
vis.visit_path_segment(segment);
@@ -729,18 +669,6 @@ fn walk_closure_binder<T: MutVisitor>(vis: &mut T, binder: &mut ClosureBinder) {
}
}
/// Mutably walks a coroutine kind. All three flavours (`async`, `gen`,
/// `async gen`) carry the same fields, so a single or-pattern arm handles
/// them: both node ids are visited before the span.
fn walk_coroutine_kind<T: MutVisitor>(vis: &mut T, coroutine_kind: &mut CoroutineKind) {
    match coroutine_kind {
        CoroutineKind::Async { span, closure_id, return_impl_trait_id }
        | CoroutineKind::Gen { span, closure_id, return_impl_trait_id }
        | CoroutineKind::AsyncGen { span, closure_id, return_impl_trait_id } => {
            vis.visit_id(closure_id);
            vis.visit_id(return_impl_trait_id);
            vis.visit_span(span);
        }
    }
}
fn walk_fn<T: MutVisitor>(vis: &mut T, kind: FnKind<'_>) {
match kind {
FnKind::Fn(
@@ -991,13 +919,6 @@ pub fn walk_flat_map_expr_field<T: MutVisitor>(
smallvec![f]
}
/// Mutably walks a block: node id, then each statement (which the visitor
/// may expand or remove via `flat_map_stmt`), then the block's span.
pub fn walk_block<T: MutVisitor>(vis: &mut T, block: &mut Block) {
    // `rules` (unsafety) and `tokens` contain no nested AST nodes to visit.
    let Block { id, stmts, rules: _, span, tokens: _ } = block;
    vis.visit_id(id);
    stmts.flat_map_in_place(|stmt| vis.flat_map_stmt(stmt));
    vis.visit_span(span);
}
pub fn walk_item_kind<K: WalkItemKind>(
kind: &mut K,
span: Span,
@@ -1041,57 +962,6 @@ pub fn walk_flat_map_assoc_item(
smallvec![item]
}
/// Mutably walks a pattern: node id first, then the nested nodes of `kind`,
/// then the pattern's span.
pub fn walk_pat<T: MutVisitor>(vis: &mut T, pat: &mut Pat) {
    let Pat { id, kind, span, tokens: _ } = pat;
    vis.visit_id(id);
    match kind {
        PatKind::Err(_guar) => {}
        PatKind::Missing | PatKind::Wild | PatKind::Rest | PatKind::Never => {}
        PatKind::Ident(_binding_mode, ident, sub) => {
            vis.visit_ident(ident);
            // `sub` is the optional sub-pattern of `ident @ pat`.
            visit_opt(sub, |sub| vis.visit_pat(sub));
        }
        PatKind::Expr(e) => vis.visit_expr(e),
        PatKind::TupleStruct(qself, path, elems) => {
            vis.visit_qself(qself);
            vis.visit_path(path);
            visit_thin_vec(elems, |elem| vis.visit_pat(elem));
        }
        PatKind::Path(qself, path) => {
            vis.visit_qself(qself);
            vis.visit_path(path);
        }
        PatKind::Struct(qself, path, fields, _etc) => {
            vis.visit_qself(qself);
            vis.visit_path(path);
            fields.flat_map_in_place(|field| vis.flat_map_pat_field(field));
        }
        PatKind::Box(inner) => vis.visit_pat(inner),
        PatKind::Deref(inner) => vis.visit_pat(inner),
        PatKind::Ref(inner, _mutbl) => vis.visit_pat(inner),
        PatKind::Range(e1, e2, Spanned { span: _, node: _ }) => {
            visit_opt(e1, |e| vis.visit_expr(e));
            visit_opt(e2, |e| vis.visit_expr(e));
            // NOTE(review): the `Spanned` end-marker span is discarded above,
            // so `span` here is the *pattern's* span — which is visited again
            // after the match, i.e. twice for `Range`. Looks deliberate (the
            // replacement walker in this same commit does the same) — confirm.
            vis.visit_span(span);
        }
        PatKind::Guard(p, e) => {
            vis.visit_pat(p);
            vis.visit_expr(e);
        }
        PatKind::Tuple(elems) | PatKind::Slice(elems) | PatKind::Or(elems) => {
            visit_thin_vec(elems, |elem| vis.visit_pat(elem))
        }
        PatKind::Paren(inner) => vis.visit_pat(inner),
        PatKind::MacCall(mac) => vis.visit_mac_call(mac),
    }
    vis.visit_span(span);
}
/// Mutably walks an anonymous constant: its node id, then its value expression.
fn walk_anon_const<T: MutVisitor>(vis: &mut T, AnonConst { id, value }: &mut AnonConst) {
    vis.visit_id(id);
    vis.visit_expr(value);
}
fn walk_inline_asm<T: MutVisitor>(vis: &mut T, asm: &mut InlineAsm) {
// FIXME: Visit spans inside all this currently ignored stuff.
let InlineAsm {

View File

@@ -210,7 +210,7 @@ pub trait Visitor<'ast>: Sized {
walk_poly_trait_ref(self, t)
}
fn visit_variant_data(&mut self, s: &'ast VariantData) -> Self::Result {
walk_struct_def(self, s)
walk_variant_data(self, s)
}
fn visit_field_def(&mut self, s: &'ast FieldDef) -> Self::Result {
walk_field_def(self, s)
@@ -233,10 +233,13 @@ pub trait Visitor<'ast>: Sized {
fn visit_mac_call(&mut self, mac: &'ast MacCall) -> Self::Result {
walk_mac(self, mac)
}
fn visit_macro_def(&mut self, _mac: &'ast MacroDef, _id: NodeId) -> Self::Result {
fn visit_id(&mut self, _id: NodeId) -> Self::Result {
Self::Result::output()
}
fn visit_path(&mut self, path: &'ast Path, _id: NodeId) -> Self::Result {
fn visit_macro_def(&mut self, _mac: &'ast MacroDef) -> Self::Result {
Self::Result::output()
}
fn visit_path(&mut self, path: &'ast Path) -> Self::Result {
walk_path(self, path)
}
fn visit_use_tree(
@@ -295,8 +298,8 @@ pub trait Visitor<'ast>: Sized {
fn visit_capture_by(&mut self, _capture_by: &'ast CaptureBy) -> Self::Result {
Self::Result::output()
}
fn visit_coroutine_kind(&mut self, _coroutine_kind: &'ast CoroutineKind) -> Self::Result {
Self::Result::output()
fn visit_coroutine_kind(&mut self, coroutine_kind: &'ast CoroutineKind) -> Self::Result {
walk_coroutine_kind(self, coroutine_kind)
}
fn visit_fn_decl(&mut self, fn_decl: &'ast FnDecl) -> Self::Result {
walk_fn_decl(self, fn_decl)
@@ -334,17 +337,14 @@ macro_rules! common_visitor_and_walkers {
$(${ignore($lt)}V::Result::output())?
}
// this is only used by the MutVisitor. We include this symmetry here to make writing other functions easier
/// helper since `Visitor` wants `NodeId` but `MutVisitor` wants `&mut NodeId`
$(${ignore($lt)}
#[expect(unused, rustc::pass_by_value)]
#[expect(rustc::pass_by_value)]
)?
#[inline]
)?
fn visit_id<$($lt,)? V: $Visitor$(<$lt>)?>(visitor: &mut V, id: &$($lt)? $($mut)? NodeId) $(-> <V as Visitor<$lt>>::Result)? {
$(
${ignore($mut)}
visitor.visit_id(id);
)?
$(${ignore($lt)}V::Result::output())?
// deref `&NodeId` into `NodeId` only for `Visitor`
visitor.visit_id( $(${ignore($lt)} * )? id)
}
// this is only used by the MutVisitor. We include this symmetry here to make writing other functions easier
@@ -577,8 +577,7 @@ macro_rules! common_visitor_and_walkers {
ItemKind::MacCall(m) => vis.visit_mac_call(m),
ItemKind::MacroDef(ident, def) => {
try_visit!(vis.visit_ident(ident));
// FIXME(fee1-dead) asymmetry
vis.visit_macro_def(def$(${ignore($lt)}, id)?)
vis.visit_macro_def(def)
}
ItemKind::Delegation(box Delegation {
id,
@@ -591,7 +590,7 @@ macro_rules! common_visitor_and_walkers {
}) => {
try_visit!(visit_id(vis, id));
try_visit!(vis.visit_qself(qself));
try_visit!(vis.visit_path(path$(${ignore($lt)}, *id)?));
try_visit!(vis.visit_path(path));
try_visit!(vis.visit_ident(ident));
visit_opt!(vis, visit_ident, rename);
visit_opt!(vis, visit_block, body);
@@ -599,7 +598,7 @@ macro_rules! common_visitor_and_walkers {
}
ItemKind::DelegationMac(box DelegationMac { qself, prefix, suffixes, body }) => {
try_visit!(vis.visit_qself(qself));
try_visit!(vis.visit_path(prefix$(${ignore($lt)}, id)?));
try_visit!(vis.visit_path(prefix));
if let Some(suffixes) = suffixes {
for (ident, rename) in suffixes {
try_visit!(vis.visit_ident(ident));
@@ -642,8 +641,7 @@ macro_rules! common_visitor_and_walkers {
if let Some(define_opaque) = define_opaque {
for (id, path) in define_opaque {
try_visit!(visit_id(visitor, id));
// FIXME(fee1-dead): look into this weird asymmetry
try_visit!(visitor.visit_path(path$(${ignore($lt)}, *id)?));
try_visit!(visitor.visit_path(path));
}
}
$(<V as Visitor<$lt>>::Result::output())?
@@ -699,7 +697,7 @@ macro_rules! common_visitor_and_walkers {
}) => {
try_visit!(visit_id(vis, id));
try_visit!(vis.visit_qself(qself));
try_visit!(vis.visit_path(path $(${ignore($lt)}, *id)?));
try_visit!(vis.visit_path(path));
try_visit!(vis.visit_ident(ident));
visit_opt!(vis, visit_ident, rename);
visit_opt!(vis, visit_block, body);
@@ -707,7 +705,7 @@ macro_rules! common_visitor_and_walkers {
}
AssocItemKind::DelegationMac(box DelegationMac { qself, prefix, suffixes, body }) => {
try_visit!(vis.visit_qself(qself));
try_visit!(vis.visit_path(prefix$(${ignore($lt)}, id)?));
try_visit!(vis.visit_path(prefix));
if let Some(suffixes) = suffixes {
for (ident, rename) in suffixes {
try_visit!(vis.visit_ident(ident));
@@ -773,6 +771,190 @@ macro_rules! common_visitor_and_walkers {
}
}
}
// Dual walker emitted by the surrounding macro for both `Visitor` and
// `MutVisitor`: `$lt`-guarded fragments expand only for the by-reference
// visitor, `$mut`-guarded ones only for the mutable visitor.
fn walk_coroutine_kind<$($lt,)? V: $Visitor$(<$lt>)?>(
    vis: &mut V,
    coroutine_kind: &$($lt)? $($mut)? CoroutineKind,
) $(-> <V as Visitor<$lt>>::Result)? {
    // All three flavours share the same fields; ids are visited before the span.
    match coroutine_kind {
        CoroutineKind::Async { span, closure_id, return_impl_trait_id }
        | CoroutineKind::Gen { span, closure_id, return_impl_trait_id }
        | CoroutineKind::AsyncGen { span, closure_id, return_impl_trait_id } => {
            try_visit!(visit_id(vis, closure_id));
            try_visit!(visit_id(vis, return_impl_trait_id));
            visit_span(vis, span)
        }
    }
}
// Dual walker (see macro note above each walker in this block): id first,
// then the nested nodes of `kind`, then the pattern's span.
pub fn walk_pat<$($lt,)? V: $Visitor$(<$lt>)?>(
    vis: &mut V,
    pattern: &$($lt)? $($mut)? Pat
) $(-> <V as Visitor<$lt>>::Result)? {
    let Pat { id, kind, span, tokens: _ } = pattern;
    try_visit!(visit_id(vis, id));
    match kind {
        PatKind::Err(_guar) => {}
        PatKind::Missing | PatKind::Wild | PatKind::Rest | PatKind::Never => {}
        PatKind::Ident(_bmode, ident, optional_subpattern) => {
            try_visit!(vis.visit_ident(ident));
            visit_opt!(vis, visit_pat, optional_subpattern);
        }
        PatKind::Expr(expression) => try_visit!(vis.visit_expr(expression)),
        PatKind::TupleStruct(opt_qself, path, elems) => {
            try_visit!(vis.visit_qself(opt_qself));
            try_visit!(vis.visit_path(path));
            walk_list!(vis, visit_pat, elems);
        }
        PatKind::Path(opt_qself, path) => {
            try_visit!(vis.visit_qself(opt_qself));
            try_visit!(vis.visit_path(path))
        }
        PatKind::Struct(opt_qself, path, fields, _rest) => {
            try_visit!(vis.visit_qself(opt_qself));
            try_visit!(vis.visit_path(path));
            // Visitor iterates fields; MutVisitor may add/remove them.
            $(
                ${ignore($lt)}
                walk_list!(vis, visit_pat_field, fields);
            )?
            $(
                ${ignore($mut)}
                fields.flat_map_in_place(|field| vis.flat_map_pat_field(field));
            )?
        }
        PatKind::Box(subpattern) | PatKind::Deref(subpattern) | PatKind::Paren(subpattern) => {
            try_visit!(vis.visit_pat(subpattern));
        }
        PatKind::Ref(subpattern, _ /*mutbl*/) => {
            try_visit!(vis.visit_pat(subpattern));
        }
        PatKind::Range(lower_bound, upper_bound, _end) => {
            visit_opt!(vis, visit_expr, lower_bound);
            visit_opt!(vis, visit_expr, upper_bound);
            // NOTE(review): `span` here is the pattern's own span, which is
            // visited again after the match — so `Range` visits it twice.
            // Matches the pre-existing MutVisitor behavior; confirm intent.
            try_visit!(visit_span(vis, span));
        }
        PatKind::Guard(subpattern, guard_condition) => {
            try_visit!(vis.visit_pat(subpattern));
            try_visit!(vis.visit_expr(guard_condition));
        }
        PatKind::Tuple(elems) | PatKind::Slice(elems) | PatKind::Or(elems) => {
            walk_list!(vis, visit_pat, elems);
        }
        PatKind::MacCall(mac) => try_visit!(vis.visit_mac_call(mac)),
    }
    visit_span(vis, span)
}
// Dual walker for an anonymous constant: node id, then the value expression
// (tail position, so the expression visit supplies the result).
pub fn walk_anon_const<$($lt,)? V: $Visitor$(<$lt>)?>(
    vis: &mut V,
    constant: &$($lt)? $($mut)? AnonConst,
) $(-> <V as Visitor<$lt>>::Result)? {
    let AnonConst { id, value } = constant;
    try_visit!(visit_id(vis, id));
    vis.visit_expr(value)
}
// Dual walker for a path segment: node id, identifier, then optional
// generic arguments. The trailing `output()` expands only for `Visitor`
// (MutVisitor walkers return `()`).
pub fn walk_path_segment<$($lt,)? V: $Visitor$(<$lt>)?>(
    vis: &mut V,
    segment: &$($lt)? $($mut)? PathSegment,
) $(-> <V as Visitor<$lt>>::Result)? {
    let PathSegment { ident, id, args } = segment;
    try_visit!(visit_id(vis, id));
    try_visit!(vis.visit_ident(ident));
    visit_opt!(vis, visit_generic_args, args);
    $(<V as Visitor<$lt>>::Result::output())?
}
// Dual walker for a block: node id, then statements (iterated for `Visitor`,
// flat-mapped in place for `MutVisitor`), then the block's span.
pub fn walk_block<$($lt,)? V: $Visitor$(<$lt>)?>(
    vis: &mut V,
    block: &$($lt)? $($mut)? Block
) $(-> <V as Visitor<$lt>>::Result)? {
    let Block { stmts, id, rules: _, span, tokens: _ } = block;
    try_visit!(visit_id(vis, id));
    $(
        ${ignore($lt)}
        walk_list!(vis, visit_stmt, stmts);
    )?
    $(
        ${ignore($mut)}
        stmts.flat_map_in_place(|stmt| vis.flat_map_stmt(stmt));
    )?
    visit_span(vis, span)
}
// Dual walker for a type: node id, nested nodes of `kind`, then the span.
// Unifies the previously separate `Visitor` and `MutVisitor` walkers.
pub fn walk_ty<$($lt,)? V: $Visitor$(<$lt>)?>(
    vis: &mut V, ty: &$($lt)? $($mut)? Ty
) $(-> <V as Visitor<$lt>>::Result)? {
    let Ty { id, kind, span, tokens: _ } = ty;
    try_visit!(visit_id(vis, id));
    match kind {
        TyKind::Err(_guar) => {}
        TyKind::Infer | TyKind::ImplicitSelf | TyKind::Dummy | TyKind::Never | TyKind::CVarArgs => {}
        TyKind::Slice(ty) | TyKind::Paren(ty) => try_visit!(vis.visit_ty(ty)),
        TyKind::Ptr(MutTy { ty, mutbl: _ }) => try_visit!(vis.visit_ty(ty)),
        TyKind::Ref(opt_lifetime, MutTy { ty, mutbl: _ })
        | TyKind::PinnedRef(opt_lifetime, MutTy { ty, mutbl: _ }) => {
            // FIXME(fee1-dead) asymmetry: only `Visitor::visit_lifetime`
            // takes a `LifetimeCtxt`, hence the `$lt`-guarded extra argument.
            visit_opt!(vis, visit_lifetime, opt_lifetime$(${ignore($lt)}, LifetimeCtxt::Ref)?);
            try_visit!(vis.visit_ty(ty));
        }
        TyKind::Tup(tuple_element_types) => {
            walk_list!(vis, visit_ty, tuple_element_types);
        }
        TyKind::BareFn(function_declaration) => {
            let BareFnTy { safety, ext: _, generic_params, decl, decl_span } =
                &$($mut)? **function_declaration;
            visit_safety(vis, safety);
            $(
                ${ignore($lt)}
                walk_list!(vis, visit_generic_param, generic_params);
            )?
            $(
                ${ignore($mut)}
                generic_params.flat_map_in_place(|param| vis.flat_map_generic_param(param));
            )?
            try_visit!(vis.visit_fn_decl(decl));
            try_visit!(visit_span(vis, decl_span));
        }
        TyKind::UnsafeBinder(binder) => {
            $(
                ${ignore($lt)}
                walk_list!(vis, visit_generic_param, &binder.generic_params);
            )?
            $(
                ${ignore($mut)}
                binder.generic_params.flat_map_in_place(|param| vis.flat_map_generic_param(param));
            )?
            try_visit!(vis.visit_ty(&$($mut)?binder.inner_ty));
        }
        TyKind::Path(maybe_qself, path) => {
            try_visit!(vis.visit_qself(maybe_qself));
            try_visit!(vis.visit_path(path));
        }
        TyKind::Pat(ty, pat) => {
            try_visit!(vis.visit_ty(ty));
            try_visit!(vis.visit_ty_pat(pat));
        }
        TyKind::Array(ty, length) => {
            try_visit!(vis.visit_ty(ty));
            try_visit!(vis.visit_anon_const(length));
        }
        TyKind::TraitObject(bounds, _syntax) => {
            walk_list!(vis, visit_param_bound, bounds, BoundKind::TraitObject);
        }
        TyKind::ImplTrait(id, bounds) => {
            // `impl Trait` carries its own node id in addition to the `Ty`'s.
            try_visit!(visit_id(vis, id));
            walk_list!(vis, visit_param_bound, bounds, BoundKind::Impl);
        }
        TyKind::Typeof(expression) => try_visit!(vis.visit_anon_const(expression)),
        TyKind::MacCall(mac) => try_visit!(vis.visit_mac_call(mac)),
    }
    visit_span(vis, span)
}
};
}
@@ -808,7 +990,8 @@ where
pub fn walk_trait_ref<'a, V: Visitor<'a>>(visitor: &mut V, trait_ref: &'a TraitRef) -> V::Result {
let TraitRef { path, ref_id } = trait_ref;
visitor.visit_path(path, *ref_id)
try_visit!(visitor.visit_path(path));
visitor.visit_id(*ref_id)
}
pub fn walk_enum_def<'a, V: Visitor<'a>>(
@@ -848,56 +1031,6 @@ pub fn walk_pat_field<'a, V: Visitor<'a>>(visitor: &mut V, fp: &'a PatField) ->
V::Result::output()
}
/// Read-only walk of a type node's nested AST nodes. This (pre-unification)
/// version does not visit the node's id or span, only the contents of `kind`.
pub fn walk_ty<'a, V: Visitor<'a>>(visitor: &mut V, typ: &'a Ty) -> V::Result {
    let Ty { id, kind, span: _, tokens: _ } = typ;
    match kind {
        TyKind::Slice(ty) | TyKind::Paren(ty) => try_visit!(visitor.visit_ty(ty)),
        TyKind::Ptr(MutTy { ty, mutbl: _ }) => try_visit!(visitor.visit_ty(ty)),
        TyKind::Ref(opt_lifetime, MutTy { ty, mutbl: _ })
        | TyKind::PinnedRef(opt_lifetime, MutTy { ty, mutbl: _ }) => {
            visit_opt!(visitor, visit_lifetime, opt_lifetime, LifetimeCtxt::Ref);
            try_visit!(visitor.visit_ty(ty));
        }
        TyKind::Tup(tuple_element_types) => {
            walk_list!(visitor, visit_ty, tuple_element_types);
        }
        TyKind::BareFn(function_declaration) => {
            let BareFnTy { safety: _, ext: _, generic_params, decl, decl_span: _ } =
                &**function_declaration;
            walk_list!(visitor, visit_generic_param, generic_params);
            try_visit!(visitor.visit_fn_decl(decl));
        }
        TyKind::UnsafeBinder(binder) => {
            walk_list!(visitor, visit_generic_param, &binder.generic_params);
            try_visit!(visitor.visit_ty(&binder.inner_ty));
        }
        TyKind::Path(maybe_qself, path) => {
            try_visit!(visitor.visit_qself(maybe_qself));
            // Old two-argument `visit_path` signature: the path owner's id
            // is passed alongside the path itself.
            try_visit!(visitor.visit_path(path, *id));
        }
        TyKind::Pat(ty, pat) => {
            try_visit!(visitor.visit_ty(ty));
            try_visit!(visitor.visit_ty_pat(pat));
        }
        TyKind::Array(ty, length) => {
            try_visit!(visitor.visit_ty(ty));
            try_visit!(visitor.visit_anon_const(length));
        }
        TyKind::TraitObject(bounds, _syntax) => {
            walk_list!(visitor, visit_param_bound, bounds, BoundKind::TraitObject);
        }
        TyKind::ImplTrait(_id, bounds) => {
            walk_list!(visitor, visit_param_bound, bounds, BoundKind::Impl);
        }
        TyKind::Typeof(expression) => try_visit!(visitor.visit_anon_const(expression)),
        TyKind::Infer | TyKind::ImplicitSelf | TyKind::Dummy => {}
        TyKind::Err(_guar) => {}
        TyKind::MacCall(mac) => try_visit!(visitor.visit_mac_call(mac)),
        TyKind::Never | TyKind::CVarArgs => {}
    }
    V::Result::output()
}
pub fn walk_ty_pat<'a, V: Visitor<'a>>(visitor: &mut V, tp: &'a TyPat) -> V::Result {
let TyPat { id: _, kind, span: _, tokens: _ } = tp;
match kind {
@@ -931,7 +1064,8 @@ pub fn walk_use_tree<'a, V: Visitor<'a>>(
id: NodeId,
) -> V::Result {
let UseTree { prefix, kind, span: _ } = use_tree;
try_visit!(visitor.visit_path(prefix, id));
try_visit!(visitor.visit_id(id));
try_visit!(visitor.visit_path(prefix));
match kind {
UseTreeKind::Simple(rename) => {
// The extra IDs are handled during AST lowering.
@@ -947,16 +1081,6 @@ pub fn walk_use_tree<'a, V: Visitor<'a>>(
V::Result::output()
}
/// Read-only walk of a single path segment: the identifier first, then any
/// generic arguments. The segment's node id is not visited by this walker.
pub fn walk_path_segment<'a, V: Visitor<'a>>(
    visitor: &mut V,
    segment: &'a PathSegment,
) -> V::Result {
    let PathSegment { ident, id: _, args } = segment;
    try_visit!(visitor.visit_ident(ident));
    if let Some(generic_args) = args {
        try_visit!(visitor.visit_generic_args(generic_args));
    }
    V::Result::output()
}
pub fn walk_generic_args<'a, V>(visitor: &mut V, generic_args: &'a GenericArgs) -> V::Result
where
V: Visitor<'a>,
@@ -1012,52 +1136,6 @@ pub fn walk_assoc_item_constraint<'a, V: Visitor<'a>>(
V::Result::output()
}
/// Read-only walk of a pattern's nested AST nodes. This (pre-unification)
/// version does not visit the pattern's id or span; paths receive the
/// pattern's id via the old two-argument `visit_path`.
pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) -> V::Result {
    let Pat { id, kind, span: _, tokens: _ } = pattern;
    match kind {
        PatKind::TupleStruct(opt_qself, path, elems) => {
            try_visit!(visitor.visit_qself(opt_qself));
            try_visit!(visitor.visit_path(path, *id));
            walk_list!(visitor, visit_pat, elems);
        }
        PatKind::Path(opt_qself, path) => {
            try_visit!(visitor.visit_qself(opt_qself));
            try_visit!(visitor.visit_path(path, *id))
        }
        PatKind::Struct(opt_qself, path, fields, _rest) => {
            try_visit!(visitor.visit_qself(opt_qself));
            try_visit!(visitor.visit_path(path, *id));
            walk_list!(visitor, visit_pat_field, fields);
        }
        PatKind::Box(subpattern) | PatKind::Deref(subpattern) | PatKind::Paren(subpattern) => {
            try_visit!(visitor.visit_pat(subpattern));
        }
        PatKind::Ref(subpattern, _ /*mutbl*/) => {
            try_visit!(visitor.visit_pat(subpattern));
        }
        PatKind::Ident(_bmode, ident, optional_subpattern) => {
            try_visit!(visitor.visit_ident(ident));
            // Optional sub-pattern of `ident @ pat`.
            visit_opt!(visitor, visit_pat, optional_subpattern);
        }
        PatKind::Expr(expression) => try_visit!(visitor.visit_expr(expression)),
        PatKind::Range(lower_bound, upper_bound, _end) => {
            visit_opt!(visitor, visit_expr, lower_bound);
            visit_opt!(visitor, visit_expr, upper_bound);
        }
        PatKind::Guard(subpattern, guard_condition) => {
            try_visit!(visitor.visit_pat(subpattern));
            try_visit!(visitor.visit_expr(guard_condition));
        }
        PatKind::Missing | PatKind::Wild | PatKind::Rest | PatKind::Never => {}
        PatKind::Err(_guar) => {}
        PatKind::Tuple(elems) | PatKind::Slice(elems) | PatKind::Or(elems) => {
            walk_list!(visitor, visit_pat, elems);
        }
        PatKind::MacCall(mac) => try_visit!(visitor.visit_mac_call(mac)),
    }
    V::Result::output()
}
pub fn walk_param_bound<'a, V: Visitor<'a>>(visitor: &mut V, bound: &'a GenericBound) -> V::Result {
match bound {
GenericBound::Trait(trait_ref) => visitor.visit_poly_trait_ref(trait_ref),
@@ -1075,7 +1153,10 @@ pub fn walk_precise_capturing_arg<'a, V: Visitor<'a>>(
) -> V::Result {
match arg {
PreciseCapturingArg::Lifetime(lt) => visitor.visit_lifetime(lt, LifetimeCtxt::GenericArg),
PreciseCapturingArg::Arg(path, id) => visitor.visit_path(path, *id),
PreciseCapturingArg::Arg(path, id) => {
try_visit!(visitor.visit_id(*id));
visitor.visit_path(path)
}
}
}
@@ -1216,11 +1297,9 @@ pub fn walk_fn<'a, V: Visitor<'a>>(visitor: &mut V, kind: FnKind<'a>) -> V::Resu
V::Result::output()
}
pub fn walk_struct_def<'a, V: Visitor<'a>>(
visitor: &mut V,
struct_definition: &'a VariantData,
) -> V::Result {
walk_list!(visitor, visit_field_def, struct_definition.fields());
pub fn walk_variant_data<'a, V: Visitor<'a>>(visitor: &mut V, data: &'a VariantData) -> V::Result {
visit_opt!(visitor, visit_id, data.ctor_node_id());
walk_list!(visitor, visit_field_def, data.fields());
V::Result::output()
}
@@ -1235,12 +1314,6 @@ pub fn walk_field_def<'a, V: Visitor<'a>>(visitor: &mut V, field: &'a FieldDef)
V::Result::output()
}
pub fn walk_block<'a, V: Visitor<'a>>(visitor: &mut V, block: &'a Block) -> V::Result {
let Block { stmts, id: _, rules: _, span: _, tokens: _ } = block;
walk_list!(visitor, visit_stmt, stmts);
V::Result::output()
}
pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) -> V::Result {
let Stmt { id: _, kind, span: _ } = statement;
match kind {
@@ -1259,12 +1332,7 @@ pub fn walk_stmt<'a, V: Visitor<'a>>(visitor: &mut V, statement: &'a Stmt) -> V:
pub fn walk_mac<'a, V: Visitor<'a>>(visitor: &mut V, mac: &'a MacCall) -> V::Result {
let MacCall { path, args: _ } = mac;
visitor.visit_path(path, DUMMY_NODE_ID)
}
pub fn walk_anon_const<'a, V: Visitor<'a>>(visitor: &mut V, constant: &'a AnonConst) -> V::Result {
let AnonConst { id: _, value } = constant;
visitor.visit_expr(value)
visitor.visit_path(path)
}
pub fn walk_inline_asm<'a, V: Visitor<'a>>(visitor: &mut V, asm: &'a InlineAsm) -> V::Result {
@@ -1304,7 +1372,8 @@ pub fn walk_inline_asm_sym<'a, V: Visitor<'a>>(
InlineAsmSym { id, qself, path }: &'a InlineAsmSym,
) -> V::Result {
try_visit!(visitor.visit_qself(qself));
visitor.visit_path(path, *id)
try_visit!(visitor.visit_id(*id));
visitor.visit_path(path)
}
pub fn walk_format_args<'a, V: Visitor<'a>>(visitor: &mut V, fmt: &'a FormatArgs) -> V::Result {
@@ -1336,7 +1405,8 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) -> V
ExprKind::Struct(se) => {
let StructExpr { qself, path, fields, rest } = &**se;
try_visit!(visitor.visit_qself(qself));
try_visit!(visitor.visit_path(path, *id));
try_visit!(visitor.visit_id(*id));
try_visit!(visitor.visit_path(path));
walk_list!(visitor, visit_expr_field, fields);
match rest {
StructRest::Base(expr) => try_visit!(visitor.visit_expr(expr)),
@@ -1446,7 +1516,8 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) -> V
ExprKind::Underscore => {}
ExprKind::Path(maybe_qself, path) => {
try_visit!(visitor.visit_qself(maybe_qself));
try_visit!(visitor.visit_path(path, *id));
try_visit!(visitor.visit_id(*id));
try_visit!(visitor.visit_path(path));
}
ExprKind::Break(opt_label, opt_expr) => {
visit_opt!(visitor, visit_label, opt_label);
@@ -1509,7 +1580,8 @@ pub fn walk_vis<'a, V: Visitor<'a>>(visitor: &mut V, vis: &'a Visibility) -> V::
let Visibility { kind, span: _, tokens: _ } = vis;
match kind {
VisibilityKind::Restricted { path, id, shorthand: _ } => {
try_visit!(visitor.visit_path(path, *id));
try_visit!(visitor.visit_id(*id));
try_visit!(visitor.visit_path(path));
}
VisibilityKind::Public | VisibilityKind::Inherited => {}
}
@@ -1522,7 +1594,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
AttrKind::Normal(normal) => {
let NormalAttr { item, tokens: _ } = &**normal;
let AttrItem { unsafety: _, path, args, tokens: _ } = item;
try_visit!(visitor.visit_path(path, DUMMY_NODE_ID));
try_visit!(visitor.visit_path(path));
try_visit!(walk_attr_args(visitor, args));
}
AttrKind::DocComment(_kind, _sym) => {}

View File

@@ -446,13 +446,7 @@ impl<'a> SelfResolver<'a> {
}
impl<'ast, 'a> Visitor<'ast> for SelfResolver<'a> {
fn visit_path(&mut self, path: &'ast Path, id: NodeId) {
fn visit_id(&mut self, id: NodeId) {
self.try_replace_id(id);
visit::walk_path(self, path);
}
fn visit_path_segment(&mut self, seg: &'ast PathSegment) {
self.try_replace_id(seg.id);
visit::walk_path_segment(self, seg);
}
}

View File

@@ -74,8 +74,8 @@ impl<'ecx, 'tcx, T: EarlyLintPass> EarlyContextAndPass<'ecx, 'tcx, T> {
impl<'ast, 'ecx, 'tcx, T: EarlyLintPass> ast_visit::Visitor<'ast>
for EarlyContextAndPass<'ecx, 'tcx, T>
{
fn visit_coroutine_kind(&mut self, coroutine_kind: &'ast ast::CoroutineKind) -> Self::Result {
self.check_id(coroutine_kind.closure_id());
fn visit_id(&mut self, id: rustc_ast::NodeId) {
self.check_id(id);
}
fn visit_param(&mut self, param: &'ast ast::Param) {
@@ -101,7 +101,6 @@ impl<'ast, 'ecx, 'tcx, T: EarlyLintPass> ast_visit::Visitor<'ast>
fn visit_pat(&mut self, p: &'ast ast::Pat) {
lint_callback!(self, check_pat, p);
self.check_id(p.id);
ast_visit::walk_pat(self, p);
lint_callback!(self, check_pat_post, p);
}
@@ -112,11 +111,6 @@ impl<'ast, 'ecx, 'tcx, T: EarlyLintPass> ast_visit::Visitor<'ast>
});
}
fn visit_anon_const(&mut self, c: &'ast ast::AnonConst) {
self.check_id(c.id);
ast_visit::walk_anon_const(self, c);
}
fn visit_expr(&mut self, e: &'ast ast::Expr) {
self.with_lint_attrs(e.id, &e.attrs, |cx| {
lint_callback!(cx, check_expr, e);
@@ -157,13 +151,6 @@ impl<'ast, 'ecx, 'tcx, T: EarlyLintPass> ast_visit::Visitor<'ast>
ast_visit::walk_fn(self, fk);
}
fn visit_variant_data(&mut self, s: &'ast ast::VariantData) {
if let Some(ctor_node_id) = s.ctor_node_id() {
self.check_id(ctor_node_id);
}
ast_visit::walk_struct_def(self, s);
}
fn visit_field_def(&mut self, s: &'ast ast::FieldDef) {
self.with_lint_attrs(s.id, &s.attrs, |cx| {
ast_visit::walk_field_def(cx, s);
@@ -179,7 +166,6 @@ impl<'ast, 'ecx, 'tcx, T: EarlyLintPass> ast_visit::Visitor<'ast>
fn visit_ty(&mut self, t: &'ast ast::Ty) {
lint_callback!(self, check_ty, t);
self.check_id(t.id);
ast_visit::walk_ty(self, t);
}
@@ -196,7 +182,6 @@ impl<'ast, 'ecx, 'tcx, T: EarlyLintPass> ast_visit::Visitor<'ast>
fn visit_block(&mut self, b: &'ast ast::Block) {
lint_callback!(self, check_block, b);
self.check_id(b.id);
ast_visit::walk_block(self, b);
}
@@ -257,29 +242,13 @@ impl<'ast, 'ecx, 'tcx, T: EarlyLintPass> ast_visit::Visitor<'ast>
});
}
fn visit_lifetime(&mut self, lt: &'ast ast::Lifetime, _: ast_visit::LifetimeCtxt) {
self.check_id(lt.id);
ast_visit::walk_lifetime(self, lt);
}
fn visit_path(&mut self, p: &'ast ast::Path, id: ast::NodeId) {
self.check_id(id);
ast_visit::walk_path(self, p);
}
fn visit_path_segment(&mut self, s: &'ast ast::PathSegment) {
self.check_id(s.id);
ast_visit::walk_path_segment(self, s);
}
fn visit_attribute(&mut self, attr: &'ast ast::Attribute) {
lint_callback!(self, check_attribute, attr);
ast_visit::walk_attribute(self, attr);
}
fn visit_macro_def(&mut self, mac: &'ast ast::MacroDef, id: ast::NodeId) {
fn visit_macro_def(&mut self, mac: &'ast ast::MacroDef) {
lint_callback!(self, check_mac_def, mac);
self.check_id(id);
}
fn visit_mac_call(&mut self, mac: &'ast ast::MacCall) {

View File

@@ -126,7 +126,7 @@ fn lints_that_dont_need_to_run(tcx: TyCtxt<'_>, (): ()) -> UnordSet<LintId> {
.filter(|lint| {
// Lints that show up in future-compat reports must always be run.
let has_future_breakage =
lint.future_incompatible.is_some_and(|fut| fut.reason.has_future_breakage());
lint.future_incompatible.is_some_and(|fut| fut.report_in_deps);
!has_future_breakage && !lint.eval_always
})
.filter(|lint| {

View File

@@ -178,8 +178,9 @@ declare_lint! {
Warn,
"applying forbid to lint-groups",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #81670 <https://github.com/rust-lang/rust/issues/81670>",
report_in_deps: true,
};
}
@@ -214,7 +215,7 @@ declare_lint! {
Deny,
"ill-formed attribute inputs that were previously accepted and used in practice",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #57571 <https://github.com/rust-lang/rust/issues/57571>",
};
crate_level_only
@@ -251,8 +252,9 @@ declare_lint! {
Deny,
"conflicts between `#[repr(..)]` hints that were previously accepted and used in practice",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #68585 <https://github.com/rust-lang/rust/issues/68585>",
report_in_deps: true,
};
}
@@ -1240,8 +1242,9 @@ declare_lint! {
Deny,
"detect public re-exports of private extern crates",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #127909 <https://github.com/rust-lang/rust/issues/127909>",
report_in_deps: true,
};
}
@@ -1270,8 +1273,9 @@ declare_lint! {
Deny,
"type parameter default erroneously allowed in invalid location",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #36887 <https://github.com/rust-lang/rust/issues/36887>",
report_in_deps: true,
};
}
@@ -1409,7 +1413,7 @@ declare_lint! {
Deny,
"patterns in functions without body were erroneously allowed",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #35203 <https://github.com/rust-lang/rust/issues/35203>",
};
}
@@ -1453,8 +1457,9 @@ declare_lint! {
Deny,
"detects missing fragment specifiers in unused `macro_rules!` patterns",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #40107 <https://github.com/rust-lang/rust/issues/40107>",
report_in_deps: true,
};
}
@@ -1495,7 +1500,7 @@ declare_lint! {
Warn,
"detects generic lifetime arguments in path segments with late bound lifetime parameters",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #42868 <https://github.com/rust-lang/rust/issues/42868>",
};
}
@@ -2122,8 +2127,9 @@ declare_lint! {
Deny,
"detects proc macro derives using inaccessible names from parent modules",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #83583 <https://github.com/rust-lang/rust/issues/83583>",
report_in_deps: true,
};
}
@@ -2225,7 +2231,7 @@ declare_lint! {
"macro-expanded `macro_export` macros from the current crate \
cannot be referred to by absolute paths",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #52234 <https://github.com/rust-lang/rust/issues/52234>",
};
crate_level_only
@@ -2346,7 +2352,7 @@ declare_lint! {
Deny,
"ambiguous associated items",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #57644 <https://github.com/rust-lang/rust/issues/57644>",
};
}
@@ -2362,8 +2368,9 @@ declare_lint! {
Deny,
"a feature gate that doesn't break dependent crates",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #64266 <https://github.com/rust-lang/rust/issues/64266>",
report_in_deps: true,
};
}
@@ -2674,7 +2681,7 @@ declare_lint! {
Warn,
"detects a generic constant is used in a type without a emitting a warning",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #76200 <https://github.com/rust-lang/rust/issues/76200>",
};
}
@@ -2733,7 +2740,7 @@ declare_lint! {
Warn,
"uninhabited static",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #74840 <https://github.com/rust-lang/rust/issues/74840>",
};
}
@@ -2866,7 +2873,7 @@ declare_lint! {
Warn,
"detect unsupported use of `Self` from outer item",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #124186 <https://github.com/rust-lang/rust/issues/124186>",
};
}
@@ -2912,8 +2919,9 @@ declare_lint! {
Warn,
"trailing semicolon in macro body used as expression",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #79813 <https://github.com/rust-lang/rust/issues/79813>",
report_in_deps: true,
};
}
@@ -2959,7 +2967,7 @@ declare_lint! {
Warn,
"detects derive helper attributes that are used before they are introduced",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #79202 <https://github.com/rust-lang/rust/issues/79202>",
};
}
@@ -3126,7 +3134,7 @@ declare_lint! {
Warn,
"transparent type contains an external ZST that is marked #[non_exhaustive] or contains private fields",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #78586 <https://github.com/rust-lang/rust/issues/78586>",
};
}
@@ -3177,7 +3185,7 @@ declare_lint! {
Warn,
"unstable syntax can change at any point in the future, causing a hard error!",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #65860 <https://github.com/rust-lang/rust/issues/65860>",
};
}
@@ -3685,8 +3693,9 @@ declare_lint! {
Warn,
"use of unsupported calling convention for function pointer",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #130260 <https://github.com/rust-lang/rust/issues/130260>",
report_in_deps: true,
};
}
@@ -4368,7 +4377,7 @@ declare_lint! {
Warn,
"detects certain glob imports that require reporting an ambiguity error",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #114095 <https://github.com/rust-lang/rust/issues/114095>",
};
}
@@ -4523,7 +4532,7 @@ declare_lint! {
Deny,
"elided lifetimes cannot be used in associated constants in impls",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #115010 <https://github.com/rust-lang/rust/issues/115010>",
};
}
@@ -4570,7 +4579,7 @@ declare_lint! {
Warn,
"detects certain macro bindings that should not be re-exported",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #120192 <https://github.com/rust-lang/rust/issues/120192>",
};
}
@@ -4635,7 +4644,7 @@ declare_lint! {
Warn,
"impl contains type parameters that are not covered",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #124559 <https://github.com/rust-lang/rust/issues/124559>",
};
}
@@ -4799,7 +4808,7 @@ declare_lint! {
Warn,
"detects out of scope calls to `macro_rules` in key-value attributes",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #124535 <https://github.com/rust-lang/rust/issues/124535>",
};
}
@@ -5040,8 +5049,9 @@ declare_lint! {
Warn,
"detects code relying on rustc's non-spec-compliant wasm C ABI",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #138762 <https://github.com/rust-lang/rust/issues/138762>",
report_in_deps: true,
};
}
@@ -5081,7 +5091,8 @@ declare_lint! {
Warn,
"detects code that could be affected by ABI issues on aarch64 softfloat targets",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
reference: "issue #134375 <https://github.com/rust-lang/rust/issues/134375>",
report_in_deps: true,
};
}

View File

@@ -361,6 +361,18 @@ pub struct FutureIncompatibleInfo {
/// Set to false for lints that already include a more detailed
/// explanation.
pub explain_reason: bool,
/// If set to `true`, this will make future incompatibility warnings show up in cargo's
/// reports.
///
/// When a future incompatibility warning is first introduced, set this to `false`
/// (or, rather, don't override the default). This allows crate developers an opportunity
/// to fix the warning before blasting all dependents with a warning they can't fix
/// (dependents have to wait for a new release of the affected crate to be published).
///
/// After a lint has been in this state for a while, consider setting this to true, so it
/// warns for everyone. It is a good signal that it is ready if you can determine that all
/// or most affected crates on crates.io have been updated.
pub report_in_deps: bool,
}
/// The reason for future incompatibility
@@ -380,46 +392,24 @@ pub struct FutureIncompatibleInfo {
pub enum FutureIncompatibilityReason {
/// This will be an error in a future release for all editions
///
/// This will *not* show up in cargo's future breakage report.
/// The warning will hence only be seen in local crates, not in dependencies.
///
/// Choose this variant when you are first introducing a "future
/// incompatible" warning that is intended to eventually be fixed in the
/// future. This allows crate developers an opportunity to fix the warning
/// before blasting all dependents with a warning they can't fix
/// (dependents have to wait for a new release of the affected crate to be
/// published).
/// future.
///
/// After a lint has been in this state for a while, consider graduating
/// it to [`FutureIncompatibilityReason::FutureReleaseErrorReportInDeps`].
FutureReleaseErrorDontReportInDeps,
/// This will be an error in a future release, and
/// Cargo should create a report even for dependencies
///
/// This is the *only* reason that will make future incompatibility warnings show up in cargo's
/// reports. All other future incompatibility warnings are not visible when they occur in a
/// dependency.
///
/// Choose this variant after the lint has been sitting in the
/// [`FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps`]
/// state for a while, and you feel like it is ready to graduate to
/// warning everyone. It is a good signal that it is ready if you can
/// determine that all or most affected crates on crates.io have been
/// updated.
/// After a lint has been in this state for a while and you feel like it is ready to graduate
/// to warning everyone, consider setting [`FutureIncompatibleInfo::report_in_deps`] to true.
/// (see its documentation for more guidance)
///
/// After some period of time, lints with this variant can be turned into
/// hard errors (and the lint removed). Preferably when there is some
/// confidence that the number of impacted projects is very small (few
/// should have a broken dependency in their dependency tree).
///
/// [`EditionAndFutureReleaseError`]: FutureIncompatibilityReason::EditionAndFutureReleaseError
FutureReleaseErrorReportInDeps,
FutureReleaseError,
/// Code that changes meaning in some way in a
/// future release.
///
/// Choose this variant when the semantics of existing code is changing,
/// (as opposed to
/// [`FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps`],
/// (as opposed to [`FutureIncompatibilityReason::FutureReleaseError`],
/// which is for when code is going to be rejected in the future).
FutureReleaseSemanticsChange,
/// Previously accepted code that will become an
@@ -454,13 +444,12 @@ pub enum FutureIncompatibilityReason {
/// This will be an error in the provided edition *and* in a future
/// release.
///
/// This variant a combination of [`FutureReleaseErrorDontReportInDeps`]
/// and [`EditionError`]. This is useful in rare cases when we
/// want to have "preview" of a breaking change in an edition, but do a
/// breaking change later on all editions anyway.
/// This variant is a combination of [`FutureReleaseError`] and [`EditionError`].
/// This is useful in rare cases when we want to have "preview" of a breaking
/// change in an edition, but do a breaking change later on all editions anyway.
///
/// [`EditionError`]: FutureIncompatibilityReason::EditionError
/// [`FutureReleaseErrorDontReportInDeps`]: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps
/// [`FutureReleaseError`]: FutureIncompatibilityReason::FutureReleaseError
EditionAndFutureReleaseError(Edition),
/// This will change meaning in the provided edition *and* in a future
/// release.
@@ -478,7 +467,7 @@ pub enum FutureIncompatibilityReason {
/// Choose this variant if the built-in text of the diagnostic of the
/// other variants doesn't match your situation. This is behaviorally
/// equivalent to
/// [`FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps`].
/// [`FutureIncompatibilityReason::FutureReleaseError`].
Custom(&'static str),
}
@@ -490,34 +479,20 @@ impl FutureIncompatibilityReason {
| Self::EditionAndFutureReleaseError(e)
| Self::EditionAndFutureReleaseSemanticsChange(e) => Some(e),
FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps
| FutureIncompatibilityReason::FutureReleaseErrorReportInDeps
FutureIncompatibilityReason::FutureReleaseError
| FutureIncompatibilityReason::FutureReleaseSemanticsChange
| FutureIncompatibilityReason::Custom(_) => None,
}
}
pub fn has_future_breakage(self) -> bool {
match self {
FutureIncompatibilityReason::FutureReleaseErrorReportInDeps => true,
FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps
| FutureIncompatibilityReason::FutureReleaseSemanticsChange
| FutureIncompatibilityReason::EditionError(_)
| FutureIncompatibilityReason::EditionSemanticsChange(_)
| FutureIncompatibilityReason::EditionAndFutureReleaseError(_)
| FutureIncompatibilityReason::EditionAndFutureReleaseSemanticsChange(_)
| FutureIncompatibilityReason::Custom(_) => false,
}
}
}
impl FutureIncompatibleInfo {
pub const fn default_fields_for_macro() -> Self {
FutureIncompatibleInfo {
reference: "",
reason: FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps,
reason: FutureIncompatibilityReason::FutureReleaseError,
explain_reason: true,
report_in_deps: false,
}
}
}

View File

@@ -228,10 +228,10 @@ fn main() {
let mut cmd = Command::new(&llvm_config);
cmd.arg(llvm_link_arg).arg("--libs");
// Don't link system libs if cross-compiling unless targeting Windows.
// Don't link system libs if cross-compiling unless targeting Windows from Windows host.
// On Windows system DLLs aren't linked directly, instead import libraries are used.
// These import libraries are independent of the host.
if !is_crossed || target.contains("windows") {
if !is_crossed || target.contains("windows") && host.contains("windows") {
cmd.arg("--system-libs");
}

View File

@@ -299,7 +299,7 @@ pub fn lint_level(
let has_future_breakage = future_incompatible.map_or(
// Default allow lints trigger too often for testing.
sess.opts.unstable_opts.future_incompat_test && lint.default_level != Level::Allow,
|incompat| incompat.reason.has_future_breakage(),
|incompat| incompat.report_in_deps,
);
// Convert lint level to error level.
@@ -370,8 +370,7 @@ pub fn lint_level(
if let Some(future_incompatible) = future_incompatible {
let explanation = match future_incompatible.reason {
FutureIncompatibilityReason::FutureReleaseErrorDontReportInDeps
| FutureIncompatibilityReason::FutureReleaseErrorReportInDeps => {
FutureIncompatibilityReason::FutureReleaseError => {
"this was previously accepted by the compiler but is being phased out; \
it will become a hard error in a future release!"
.to_owned()

View File

@@ -874,7 +874,7 @@ impl<'ra: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'r
kind: LifetimeBinderKind::PolyTrait,
span,
},
|this| this.visit_path(path, ty.id),
|this| this.visit_path(path),
);
} else {
visit::walk_ty(self, ty)
@@ -1265,7 +1265,7 @@ impl<'ra: 'ast, 'ast, 'tcx> Visitor<'ast> for LateResolutionVisitor<'_, 'ast, 'r
AnonConstKind::ConstArg(IsRepeatExpr::No),
|this| {
this.smart_resolve_path(ty.id, &None, path, PathSource::Expr(None));
this.visit_path(path, ty.id);
this.visit_path(path);
},
);
@@ -3640,7 +3640,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
if let Some(qself) = &delegation.qself {
self.visit_ty(&qself.ty);
}
self.visit_path(&delegation.path, delegation.id);
self.visit_path(&delegation.path);
let Some(body) = &delegation.body else { return };
self.with_rib(ValueNS, RibKind::FnOrCoroutine, |this| {
let span = delegation.path.segments.last().unwrap().ident.span;
@@ -4867,7 +4867,7 @@ impl<'a, 'ast, 'ra: 'ast, 'tcx> LateResolutionVisitor<'a, 'ast, 'ra, 'tcx> {
if let Some(qself) = &se.qself {
self.visit_ty(&qself.ty);
}
self.visit_path(&se.path, expr.id);
self.visit_path(&se.path);
walk_list!(self, resolve_expr_field, &se.fields, expr);
match &se.rest {
StructRest::Base(expr) => self.visit_expr(expr),

View File

@@ -58,7 +58,7 @@
//!
//! There are unit tests but they are woefully inadequate at ensuring correctness, they only cover
//! a small percentage of possible errors. Far more extensive tests are located in the directory
//! `src/etc/test-float-parse` as a Rust program.
//! `src/tools/test-float-parse` as a Rust program.
//!
//! A note on integer overflow: Many parts of this file perform arithmetic with the decimal
//! exponent `e`. Primarily, we shift the decimal point around: Before the first decimal digit,

View File

@@ -943,7 +943,7 @@ impl f64 {
/// This returns NaN when *either* argument is NaN, as opposed to
/// [`f64::max`] which only returns NaN when *both* arguments are NaN.
///
/// ```
/// ```ignore-arm-unknown-linux-gnueabihf (see https://github.com/rust-lang/rust/issues/141087)
/// #![feature(float_minimum_maximum)]
/// let x = 1.0_f64;
/// let y = 2.0_f64;
@@ -970,7 +970,7 @@ impl f64 {
/// This returns NaN when *either* argument is NaN, as opposed to
/// [`f64::min`] which only returns NaN when *both* arguments are NaN.
///
/// ```
/// ```ignore-arm-unknown-linux-gnueabihf (see https://github.com/rust-lang/rust/issues/141087)
/// #![feature(float_minimum_maximum)]
/// let x = 1.0_f64;
/// let y = 2.0_f64;

View File

@@ -51,7 +51,7 @@ check-aux:
$(Q)$(BOOTSTRAP) test --stage 2 \
src/tools/cargo \
src/tools/cargotest \
src/etc/test-float-parse \
src/tools/test-float-parse \
$(BOOTSTRAP_ARGS)
# Run standard library tests in Miri.
$(Q)MIRIFLAGS="-Zmiri-strict-provenance" \

View File

@@ -528,7 +528,7 @@ tool_check_step!(Miri { path: "src/tools/miri" });
tool_check_step!(CargoMiri { path: "src/tools/miri/cargo-miri" });
tool_check_step!(Rustfmt { path: "src/tools/rustfmt" });
tool_check_step!(MiroptTestTools { path: "src/tools/miropt-test-tools" });
tool_check_step!(TestFloatParse { path: "src/etc/test-float-parse" });
tool_check_step!(TestFloatParse { path: "src/tools/test-float-parse" });
tool_check_step!(FeaturesStatusDump { path: "src/tools/features-status-dump" });
tool_check_step!(Bootstrap { path: "src/bootstrap", default: false });

View File

@@ -355,7 +355,7 @@ lint_any!(
Rustfmt, "src/tools/rustfmt", "rustfmt";
RustInstaller, "src/tools/rust-installer", "rust-installer";
Tidy, "src/tools/tidy", "tidy";
TestFloatParse, "src/etc/test-float-parse", "test-float-parse";
TestFloatParse, "src/tools/test-float-parse", "test-float-parse";
);
#[derive(Debug, Clone, PartialEq, Eq, Hash)]

View File

@@ -3544,7 +3544,7 @@ impl Step for CodegenGCC {
}
/// Test step that does two things:
/// - Runs `cargo test` for the `src/etc/test-float-parse` tool.
/// - Runs `cargo test` for the `src/tools/test-float-parse` tool.
/// - Invokes the `test-float-parse` tool to test the standard library's
/// float parsing routines.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -3559,7 +3559,7 @@ impl Step for TestFloatParse {
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/etc/test-float-parse")
run.path("src/tools/test-float-parse")
}
fn make_run(run: RunConfig<'_>) {

View File

@@ -1269,7 +1269,7 @@ impl Step for TestFloatParse {
const DEFAULT: bool = false;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.path("src/etc/test-float-parse")
run.path("src/tools/test-float-parse")
}
fn run(self, builder: &Builder<'_>) -> ToolBuildResult {
@@ -1281,7 +1281,7 @@ impl Step for TestFloatParse {
target: bootstrap_host,
tool: "test-float-parse",
mode: Mode::ToolStd,
path: "src/etc/test-float-parse",
path: "src/tools/test-float-parse",
source_type: SourceType::InTree,
extra_features: Vec::new(),
allow_features: Self::ALLOW_FEATURES,

View File

@@ -192,6 +192,8 @@ to save information after compiling a crate to be reused when recompiling the
crate, improving re-compile times. This takes a path to a directory where
incremental files will be stored.
Using incremental compilation inhibits certain optimizations (for example by increasing the amount of codegen units) and is therefore not recommended for release builds.
## inline-threshold
This option is deprecated and does nothing.
@@ -213,6 +215,8 @@ This flag lets you append a single extra argument to the linker invocation.
"Append" is significant; you can pass this flag multiple times to add multiple arguments.
On Unix-like targets that use `cc` as the linker driver, use `-Clink-arg=-Wl,$ARG` to pass an argument to the actual linker.
## link-args
This flag lets you append multiple extra arguments to the linker invocation. The
@@ -248,6 +252,10 @@ path to the linker executable. If this flag is not specified, the linker will
be inferred based on the target. See also the [linker-flavor](#linker-flavor)
flag for another way to specify the linker.
Note that on Unix-like targets (for example, `*-unknown-linux-gnu` or `*-unknown-freebsd`)
the C compiler (for example `cc` or `clang`) is used as the "linker" here, serving as a linker driver.
It will invoke the actual linker with all the necessary flags to be able to link against the system libraries like libc.
## linker-flavor
This flag controls the linker flavor used by `rustc`. If a linker is given with
@@ -301,6 +309,12 @@ The list must be separated by spaces.
Pass `--help` to see a list of options.
<div class="warning">
Because this flag directly talks to LLVM, it is not subject to the usual stability guarantees of rustc's CLI interface.
</div>
## lto
This flag controls whether LLVM uses [link time
@@ -315,6 +329,7 @@ linking time. It takes one of the following values:
LTO](http://blog.llvm.org/2016/06/thinlto-scalable-and-incremental-lto.html).
This is similar to "fat", but takes substantially less time to run while
still achieving performance gains similar to "fat".
For larger projects like the Rust compiler, ThinLTO can even result in better performance than fat LTO.
If `-C lto` is not specified, then the compiler will attempt to perform "thin
local LTO" which performs "thin" LTO on the local crate only across its
@@ -343,6 +358,8 @@ between two different versions of the same crate being linked.
This flag tells the pass manager to use an empty list of passes, instead of the
usual pre-populated list of passes.
When combined with `-O --emit llvm-ir`, it can be used to see the optimized LLVM IR emitted by rustc before any optimizations are applied by LLVM.
## no-redzone
This flag allows you to disable [the
@@ -379,7 +396,7 @@ This flag controls the optimization level.
* `2`: some optimizations.
* `3`: all optimizations.
* `s`: optimize for binary size.
* `z`: optimize for binary size, but also turn off loop vectorization.
* `z`: optimize for binary size, but more aggressively. Often results in larger binaries than `s`
Note: The [`-O` flag][option-o-optimize] is an alias for `-C opt-level=3`.
@@ -407,6 +424,9 @@ This option lets you control what happens when the code panics.
If not specified, the default depends on the target.
If any crate in the crate graph uses `abort`, the final binary (`bin`, `dylib`, `cdylib`, `staticlib`) must also use `abort`.
If `std` is used as a `dylib` with `unwind`, the final binary must also use `unwind`.
## passes
This flag can be used to add extra [LLVM
@@ -416,6 +436,12 @@ The list must be separated by spaces.
See also the [`no-prepopulate-passes`](#no-prepopulate-passes) flag.
<div class="warning">
Because this flag directly talks to LLVM, it is not subject to the usual stability guarantees of rustc's CLI interface.
</div>
## prefer-dynamic
By default, `rustc` prefers to statically link dependencies. This option will
@@ -523,12 +549,30 @@ The list of passes should be separated by spaces.
## rpath
This flag controls whether [`rpath`](https://en.wikipedia.org/wiki/Rpath) is
enabled. It takes one of the following values:
This flag controls whether rustc sets an [`rpath`](https://en.wikipedia.org/wiki/Rpath) for the binary.
It takes one of the following values:
* `y`, `yes`, `on`, `true` or no value: enable rpath.
* `n`, `no`, `off` or `false`: disable rpath (the default).
This flag only does something on Unix-like platforms (Mach-O and ELF), it is ignored on other platforms.
If enabled, rustc will add output-relative (using `@loader_path` on Mach-O and `$ORIGIN` on ELF respectively) rpaths to all `dylib` dependencies.
For example, for the following directory structure, with `libdep.so` being a `dylib` crate compiled with `-Cprefer-dynamic`:
```text
dep
|- libdep.so
a.rs
```
`rustc a.rs --extern dep=dep/libdep.so -Crpath` will, on x86-64 Linux, result in approximately the following `DT_RUNPATH`: `$ORIGIN/dep:$ORIGIN/$RELATIVE_PATH_TO_SYSROOT/lib/rustlib/x86_64-unknown-linux-gnu/lib` (where `RELATIVE_PATH_TO_SYSROOT` depends on the build directory location).
This is primarily useful for local development, to ensure that all the `dylib` dependencies can be found appropriately.
To set the rpath to a different value (which can be useful for distribution), `-Clink-arg` with a platform-specific linker argument can be used to set the rpath directly.
## save-temps
This flag controls whether temporary files generated during compilation are
@@ -545,6 +589,8 @@ point instructions in software. It takes one of the following values:
* `y`, `yes`, `on`, `true` or no value: use soft floats.
* `n`, `no`, `off` or `false`: use hardware floats (the default).
This flag only works on `*eabihf` targets and **is unsound and deprecated**.
## split-debuginfo
This option controls the emission of "split debuginfo" for debug information

View File

@@ -388,7 +388,7 @@ pub enum AssocItemConstraintKind {
/// Rustdoc makes no guarantees about the inner value of Id's. Applications
/// should treat them as opaque keys to lookup items, and avoid attempting
/// to parse them, or otherwise depend on any implementation details.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
// FIXME(aDotInTheVoid): Consider making this non-public in rustdoc-types.
pub struct Id(pub u32);

View File

@@ -42,6 +42,49 @@ impl FilePosition {
FilePositionWrapper { file_id: self.file_id.file_id(db), offset: self.offset }
}
}
impl From<FileRange> for HirFileRange {
fn from(value: FileRange) -> Self {
HirFileRange { file_id: value.file_id.into(), range: value.range }
}
}
impl From<FilePosition> for HirFilePosition {
fn from(value: FilePosition) -> Self {
HirFilePosition { file_id: value.file_id.into(), offset: value.offset }
}
}
impl FilePositionWrapper<span::FileId> {
pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FilePosition {
FilePositionWrapper {
file_id: EditionedFileId::new(db, self.file_id, edition),
offset: self.offset,
}
}
}
impl FileRangeWrapper<span::FileId> {
pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> FileRange {
FileRangeWrapper {
file_id: EditionedFileId::new(db, self.file_id, edition),
range: self.range,
}
}
}
impl<T> InFileWrapper<span::FileId, T> {
pub fn with_edition(self, db: &dyn ExpandDatabase, edition: span::Edition) -> InRealFile<T> {
InRealFile { file_id: EditionedFileId::new(db, self.file_id, edition), value: self.value }
}
}
impl HirFileRange {
pub fn file_range(self) -> Option<FileRange> {
Some(FileRange { file_id: self.file_id.file_id()?, range: self.range })
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct FileRangeWrapper<FileKind> {
pub file_id: FileKind,
@@ -194,6 +237,9 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
}
pub fn node_file_range(&self) -> FileRangeWrapper<FileId> {
FileRangeWrapper { file_id: self.file_id, range: self.value.syntax().text_range() }
}
}
impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
@@ -204,9 +250,9 @@ impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, &N> {
}
// region:specific impls
impl<SN: Borrow<SyntaxNode>> InRealFile<SN> {
pub fn file_range(&self) -> FileRange {
FileRange { file_id: self.file_id, range: self.value.borrow().text_range() }
impl<FileId: Copy, SN: Borrow<SyntaxNode>> InFileWrapper<FileId, SN> {
pub fn file_range(&self) -> FileRangeWrapper<FileId> {
FileRangeWrapper { file_id: self.file_id, range: self.value.borrow().text_range() }
}
}

View File

@@ -392,6 +392,10 @@ impl HirFileId {
}
}
pub fn call_node(self, db: &dyn ExpandDatabase) -> Option<InFile<SyntaxNode>> {
Some(db.lookup_intern_macro_call(self.macro_file()?).to_node(db))
}
pub fn as_builtin_derive_attr_node(
&self,
db: &dyn ExpandDatabase,
@@ -848,7 +852,10 @@ impl ExpansionInfo {
map_node_range_up(db, &self.exp_map, range)
}
/// Maps up the text range out of the expansion into is macro call.
/// Maps up the text range out of the expansion into its macro call.
///
/// Note that this may return multiple ranges as we lose the precise association between input to output
/// and as such we may consider inputs that are unrelated.
pub fn map_range_up_once(
&self,
db: &dyn ExpandDatabase,
@@ -864,11 +871,10 @@ impl ExpansionInfo {
InFile { file_id, value: smallvec::smallvec![span.range + anchor_offset] }
}
SpanMap::ExpansionSpanMap(arg_map) => {
let arg_range = self
.arg
.value
.as_ref()
.map_or_else(|| TextRange::empty(TextSize::from(0)), |it| it.text_range());
let Some(arg_node) = &self.arg.value else {
return InFile::new(self.arg.file_id, smallvec::smallvec![]);
};
let arg_range = arg_node.text_range();
InFile::new(
self.arg.file_id,
arg_map

View File

@@ -20,12 +20,15 @@ pub fn prettify_macro_expansion(
let span_offset = syn.text_range().start();
let target_crate = target_crate_id.data(db);
let mut syntax_ctx_id_to_dollar_crate_replacement = FxHashMap::default();
syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(syn, &mut |dollar_crate| {
syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(
syn,
&mut |dollar_crate| {
let ctx = span_map.span_at(dollar_crate.text_range().start() + span_offset).ctx;
let replacement =
syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
let macro_call_id =
ctx.outer_expn(db).expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
let macro_call_id = ctx
.outer_expn(db)
.expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
let macro_call = db.lookup_intern_macro_call(macro_call_id.into());
let macro_def_crate = macro_call.def.krate;
// First, if this is the same crate as the macro, nothing will work but `crate`.
@@ -48,7 +51,7 @@ pub fn prettify_macro_expansion(
});
if replacement.text() == "$crate" {
// The parent may have many children, and looking for the token may yield incorrect results.
return dollar_crate.clone();
return None;
}
// We need to `clone_subtree()` but rowan doesn't provide such operation for tokens.
let parent = replacement.parent().unwrap().clone_subtree().clone_for_update();
@@ -56,6 +59,7 @@ pub fn prettify_macro_expansion(
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
.find(|it| it.kind() == replacement.kind())
.unwrap()
})
},
|_| (),
)
}

View File

@@ -24,7 +24,7 @@ use hir_expand::{
attrs::collect_attrs,
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::{FileRangeWrapper, InRealFile},
files::{FileRangeWrapper, HirFileRange, InRealFile},
inert_attr_macro::find_builtin_attr_idx,
mod_path::{ModPath, PathKind},
name::AsName,
@@ -262,6 +262,17 @@ impl<DB: HirDatabase> Semantics<'_, DB> {
self.imp.file_to_module_defs(file.into())
}
pub fn hir_file_to_module_def(&self, file: impl Into<HirFileId>) -> Option<Module> {
self.imp.hir_file_to_module_defs(file.into()).next()
}
pub fn hir_file_to_module_defs(
&self,
file: impl Into<HirFileId>,
) -> impl Iterator<Item = Module> {
self.imp.hir_file_to_module_defs(file.into())
}
pub fn to_adt_def(&self, a: &ast::Adt) -> Option<Adt> {
self.imp.to_def(a)
}
@@ -357,6 +368,15 @@ impl<'db> SemanticsImpl<'db> {
tree
}
pub fn adjust_edition(&self, file_id: HirFileId) -> HirFileId {
if let Some(editioned_file_id) = file_id.file_id() {
self.attach_first_edition(editioned_file_id.file_id(self.db))
.map_or(file_id, Into::into)
} else {
file_id
}
}
pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
match file_id {
HirFileId::FileId(file_id) => {
@@ -653,7 +673,7 @@ impl<'db> SemanticsImpl<'db> {
string: &ast::String,
) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
let string_start = string.syntax().text_range().start();
let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
let token = self.wrap_token_infile(string.syntax().clone());
self.descend_into_macros_breakable(token, |token, _| {
(|| {
let token = token.value;
@@ -693,50 +713,95 @@ impl<'db> SemanticsImpl<'db> {
}
/// Retrieves the formatting part of the format_args! template string at the given offset.
///
// FIXME: Type the return type
/// Returns the range (pre-expansion) in the string literal corresponding to the resolution,
/// absolute file range (post-expansion)
/// of the part in the format string, the corresponding string token and the resolution if it
/// exists.
// FIXME: Remove this in favor of `check_for_format_args_template_with_file`
pub fn check_for_format_args_template(
&self,
original_token: SyntaxToken,
offset: TextSize,
) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
let string_start = original_token.text_range().start();
let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
self.descend_into_macros_breakable(original_token, |token, _| {
) -> Option<(
TextRange,
HirFileRange,
ast::String,
Option<Either<PathResolution, InlineAsmOperand>>,
)> {
let original_token =
self.wrap_token_infile(original_token).map(ast::String::cast).transpose()?;
self.check_for_format_args_template_with_file(original_token, offset)
}
/// Retrieves the formatting part of the format_args! template string at the given offset.
///
// FIXME: Type the return type
/// Returns the range (pre-expansion) in the string literal corresponding to the resolution,
/// absolute file range (post-expansion)
/// of the part in the format string, the corresponding string token and the resolution if it
/// exists.
pub fn check_for_format_args_template_with_file(
&self,
original_token: InFile<ast::String>,
offset: TextSize,
) -> Option<(
TextRange,
HirFileRange,
ast::String,
Option<Either<PathResolution, InlineAsmOperand>>,
)> {
let relative_offset =
offset.checked_sub(original_token.value.syntax().text_range().start())?;
self.descend_into_macros_breakable(
original_token.as_ref().map(|it| it.syntax().clone()),
|token, _| {
(|| {
let token = token.value;
self.resolve_offset_in_format_args(
ast::String::cast(token)?,
offset.checked_sub(string_start)?,
let token = token.map(ast::String::cast).transpose()?;
self.resolve_offset_in_format_args(token.as_ref(), relative_offset).map(
|(range, res)| {
(
range + original_token.value.syntax().text_range().start(),
HirFileRange {
file_id: token.file_id,
range: range + token.value.syntax().text_range().start(),
},
token.value,
res,
)
},
)
.map(|(range, res)| (range + string_start, res))
})()
.map_or(ControlFlow::Continue(()), ControlFlow::Break)
})
},
)
}
fn resolve_offset_in_format_args(
&self,
string: ast::String,
InFile { value: string, file_id }: InFile<&ast::String>,
offset: TextSize,
) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
debug_assert!(offset <= string.syntax().text_range().len());
let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let parent = literal.parent()?;
if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
let source_analyzer =
&self.analyze_impl(InFile::new(file_id, format_args.syntax()), None, false)?;
source_analyzer
.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
.resolve_offset_in_format_args(self.db, InFile::new(file_id, &format_args), offset)
.map(|(range, res)| (range, res.map(Either::Left)))
} else {
let asm = ast::AsmExpr::cast(parent)?;
let source_analyzer = &self.analyze_no_infer(asm.syntax())?;
let source_analyzer =
self.analyze_impl(InFile::new(file_id, asm.syntax()), None, false)?;
let line = asm.template().position(|it| *it.syntax() == literal)?;
let asm = self.wrap_node_infile(asm);
source_analyzer.resolve_offset_in_asm_template(asm.as_ref(), line, offset).map(
|(owner, (expr, range, index))| {
source_analyzer
.resolve_offset_in_asm_template(InFile::new(file_id, &asm), line, offset)
.map(|(owner, (expr, range, index))| {
(range, Some(Either::Right(InlineAsmOperand { owner, expr, index })))
},
)
})
}
}
@@ -809,14 +874,11 @@ impl<'db> SemanticsImpl<'db> {
None => return res,
};
let file = self.find_file(node.syntax());
let Some(file_id) = file.file_id.file_id() else {
return res;
};
if first == last {
// node is just the token, so descend the token
self.descend_into_macros_impl(
InRealFile::new(file_id, first),
InFile::new(file.file_id, first),
&mut |InFile { value, .. }, _ctx| {
if let Some(node) = value
.parent_ancestors()
@@ -831,14 +893,14 @@ impl<'db> SemanticsImpl<'db> {
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token, _ctx| {
self.descend_into_macros_impl(InFile::new(file.file_id, first), &mut |token, _ctx| {
scratch.push(token);
CONTINUE_NO_BREAKS
});
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
InRealFile::new(file_id, last),
InFile::new(file.file_id, last),
&mut |InFile { value: last, file_id: last_fid }, _ctx| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@@ -900,22 +962,18 @@ impl<'db> SemanticsImpl<'db> {
token: SyntaxToken,
mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext),
) {
if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
self.descend_into_macros_impl(token, &mut |t, ctx| {
self.descend_into_macros_impl(self.wrap_token_infile(token), &mut |t, ctx| {
cb(t, ctx);
CONTINUE_NO_BREAKS
});
}
}
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![];
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
self.descend_into_macros_impl(token, &mut |t, _ctx| {
self.descend_into_macros_impl(self.wrap_token_infile(token.clone()), &mut |t, _ctx| {
res.push(t.value);
CONTINUE_NO_BREAKS
});
}
if res.is_empty() {
res.push(token);
}
@@ -928,15 +986,13 @@ impl<'db> SemanticsImpl<'db> {
) -> SmallVec<[InFile<SyntaxToken>; 1]> {
let mut res = smallvec![];
let token = self.wrap_token_infile(token);
if let Ok(token) = token.clone().into_real_file() {
self.descend_into_macros_impl(token, &mut |t, ctx| {
self.descend_into_macros_impl(token.clone(), &mut |t, ctx| {
if !ctx.is_opaque(self.db) {
// Don't descend into opaque contexts
res.push(t);
}
CONTINUE_NO_BREAKS
});
}
if res.is_empty() {
res.push(token);
}
@@ -945,7 +1001,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn descend_into_macros_breakable<T>(
&self,
token: InRealFile<SyntaxToken>,
token: InFile<SyntaxToken>,
mut cb: impl FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
) -> Option<T> {
self.descend_into_macros_impl(token, &mut cb)
@@ -974,33 +1030,58 @@ impl<'db> SemanticsImpl<'db> {
r
}
/// Descends the token into expansions, returning the tokens that matches the input
/// token's [`SyntaxKind`] and text.
pub fn descend_into_macros_exact_with_file(
&self,
token: SyntaxToken,
) -> SmallVec<[InFile<SyntaxToken>; 1]> {
let mut r = smallvec![];
let text = token.text();
let kind = token.kind();
self.descend_into_macros_cb(token.clone(), |InFile { value, file_id }, ctx| {
let mapped_kind = value.kind();
let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
let matches = (kind == mapped_kind || any_ident_match())
&& text == value.text()
&& !ctx.is_opaque(self.db);
if matches {
r.push(InFile { value, file_id });
}
});
if r.is_empty() {
r.push(self.wrap_token_infile(token));
}
r
}
/// Descends the token into expansions, returning the first token that matches the input
/// token's [`SyntaxKind`] and text.
pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
let text = token.text();
let kind = token.kind();
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
self.descend_into_macros_breakable(token, |InFile { value, file_id: _ }, _ctx| {
self.descend_into_macros_breakable(
self.wrap_token_infile(token.clone()),
|InFile { value, file_id: _ }, _ctx| {
let mapped_kind = value.kind();
let any_ident_match =
|| kind.is_any_identifier() && value.kind().is_any_identifier();
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
if matches { ControlFlow::Break(value) } else { ControlFlow::Continue(()) }
})
} else {
None
}
},
)
.unwrap_or(token)
}
fn descend_into_macros_impl<T>(
&self,
InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
InFile { value: token, file_id }: InFile<SyntaxToken>,
f: &mut dyn FnMut(InFile<SyntaxToken>, SyntaxContext) -> ControlFlow<T>,
) -> Option<T> {
let _p = tracing::info_span!("descend_into_macros_impl").entered();
let span = self.db.real_span_map(file_id).span_for_range(token.text_range());
let span = self.db.span_map(file_id).span_for_range(token.text_range());
// Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
@@ -1024,17 +1105,16 @@ impl<'db> SemanticsImpl<'db> {
// the tokens themselves aren't that interesting as the span that is being used to map
// things down never changes.
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![];
let include = self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, file_id);
let include = file_id.file_id().and_then(|file_id| {
self.s2d_cache.borrow_mut().get_or_insert_include_for(self.db, file_id)
});
match include {
Some(include) => {
// include! inputs are always from real files, so they only need to be handled once upfront
process_expansion_for_token(&mut stack, include)?;
}
None => {
stack.push((
file_id.into(),
smallvec![(token, SyntaxContext::root(file_id.edition(self.db)))],
));
stack.push((file_id, smallvec![(token, span.ctx)]));
}
}
@@ -1678,6 +1758,11 @@ impl<'db> SemanticsImpl<'db> {
self.with_ctx(|ctx| ctx.file_to_def(file).to_owned()).into_iter().map(Module::from)
}
fn hir_file_to_module_defs(&self, file: HirFileId) -> impl Iterator<Item = Module> {
// FIXME: Do we need to care about inline modules for macro expansions?
self.file_to_module_defs(file.original_file_respecting_includes(self.db).file_id(self.db))
}
pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
db: self.db,

View File

@@ -1303,6 +1303,7 @@ impl<'db> SourceAnalyzer<'db> {
false
}
/// Returns the range of the implicit template argument and its resolution at the given `offset`
pub(crate) fn resolve_offset_in_format_args(
&self,
db: &'db dyn HirDatabase,

View File

@@ -128,11 +128,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
format!("Import `{import_name}`"),
range,
|builder| {
let scope = match scope.clone() {
ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
};
let scope = builder.make_import_scope_mut(scope.clone());
insert_use(&scope, mod_path_to_ast(&import_path, edition), &ctx.config.insert_use);
},
);
@@ -153,11 +149,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
format!("Import `{import_name} as _`"),
range,
|builder| {
let scope = match scope.clone() {
ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
};
let scope = builder.make_import_scope_mut(scope.clone());
insert_use_as_alias(
&scope,
mod_path_to_ast(&import_path, edition),
@@ -1877,4 +1869,30 @@ fn main() {
",
);
}
#[test]
fn carries_cfg_attr() {
check_assist(
auto_import,
r#"
mod m {
pub struct S;
}
#[cfg(test)]
fn foo(_: S$0) {}
"#,
r#"
#[cfg(test)]
use m::S;
mod m {
pub struct S;
}
#[cfg(test)]
fn foo(_: S) {}
"#,
);
}
}

View File

@@ -312,12 +312,8 @@ fn replace_usages(
}
// add imports across modules where needed
if let Some((import_scope, path)) = import_data {
let scope = match import_scope {
ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
};
if let Some((scope, path)) = import_data {
let scope = edit.make_import_scope_mut(scope);
delayed_mutations.push((scope, path));
}
},

View File

@@ -996,7 +996,8 @@ pub struct $0Foo {
}
"#,
r#"
pub struct Foo(#[my_custom_attr] u32);
pub struct Foo(#[my_custom_attr]
u32);
"#,
);
}

View File

@@ -923,7 +923,8 @@ where
pub struct $0Foo(#[my_custom_attr] u32);
"#,
r#"
pub struct Foo { #[my_custom_attr] field1: u32 }
pub struct Foo { #[my_custom_attr]
field1: u32 }
"#,
);
}

View File

@@ -204,12 +204,7 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
.kind
.is_some_and(|kind| matches!(kind, FlowKind::Break(_, _) | FlowKind::Continue(_)))
{
let scope = match scope {
ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
};
let scope = builder.make_import_scope_mut(scope);
let control_flow_enum =
FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow();

View File

@@ -81,11 +81,7 @@ pub(crate) fn replace_qualified_name_with_use(
|builder| {
// Now that we've brought the name into scope, re-qualify all paths that could be
// affected (that is, all paths inside the node we added the `use` to).
let scope = match scope {
ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
};
let scope = builder.make_import_scope_mut(scope);
shorten_paths(scope.as_syntax_node(), &original_path);
let path = drop_generic_args(&original_path);
let edition = ctx

View File

@@ -1,4 +1,3 @@
use ide_db::imports::insert_use::ImportScope;
use syntax::{
TextRange,
ast::{self, AstNode, HasArgList, prec::ExprPrecedence},
@@ -114,11 +113,7 @@ fn add_import(
);
if let Some(scope) = scope {
let scope = match scope {
ImportScope::File(it) => ImportScope::File(edit.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(edit.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(edit.make_mut(it)),
};
let scope = edit.make_import_scope_mut(scope);
ide_db::imports::insert_use::insert_use(&scope, import, &ctx.config.insert_use);
}
}

View File

@@ -60,107 +60,87 @@ pub struct InsertUseConfig {
}
#[derive(Debug, Clone)]
pub enum ImportScope {
pub struct ImportScope {
pub kind: ImportScopeKind,
pub required_cfgs: Vec<ast::Attr>,
}
#[derive(Debug, Clone)]
pub enum ImportScopeKind {
File(ast::SourceFile),
Module(ast::ItemList),
Block(ast::StmtList),
}
impl ImportScope {
// FIXME: Remove this?
#[cfg(test)]
fn from(syntax: SyntaxNode) -> Option<Self> {
use syntax::match_ast;
fn contains_cfg_attr(attrs: &dyn HasAttrs) -> bool {
attrs.attrs().any(|attr| attr.as_simple_call().is_some_and(|(ident, _)| ident == "cfg"))
}
match_ast! {
match syntax {
ast::Module(module) => module.item_list().map(ImportScope::Module),
ast::SourceFile(file) => Some(ImportScope::File(file)),
ast::Fn(func) => contains_cfg_attr(&func).then(|| func.body().and_then(|it| it.stmt_list().map(ImportScope::Block))).flatten(),
ast::Const(konst) => contains_cfg_attr(&konst).then(|| match konst.body()? {
ast::Expr::BlockExpr(block) => Some(block),
_ => None,
}).flatten().and_then(|it| it.stmt_list().map(ImportScope::Block)),
ast::Static(statik) => contains_cfg_attr(&statik).then(|| match statik.body()? {
ast::Expr::BlockExpr(block) => Some(block),
_ => None,
}).flatten().and_then(|it| it.stmt_list().map(ImportScope::Block)),
_ => None,
}
}
}
/// Determines the containing syntax node in which to insert a `use` statement affecting `position`.
/// Returns the original source node inside attributes.
pub fn find_insert_use_container(
position: &SyntaxNode,
sema: &Semantics<'_, RootDatabase>,
) -> Option<Self> {
fn contains_cfg_attr(attrs: &dyn HasAttrs) -> bool {
attrs.attrs().any(|attr| attr.as_simple_call().is_some_and(|(ident, _)| ident == "cfg"))
}
// The closest block expression ancestor
let mut block = None;
let mut required_cfgs = Vec::new();
// Walk up the ancestor tree searching for a suitable node to do insertions on
// with special handling on cfg-gated items, in which case we want to insert imports locally
// or FIXME: annotate inserted imports with the same cfg
for syntax in sema.ancestors_with_macros(position.clone()) {
if let Some(file) = ast::SourceFile::cast(syntax.clone()) {
return Some(ImportScope::File(file));
} else if let Some(item) = ast::Item::cast(syntax) {
return match item {
ast::Item::Const(konst) if contains_cfg_attr(&konst) => {
// FIXME: Instead of bailing out with None, we should note down that
// this import needs an attribute added
match sema.original_ast_node(konst)?.body()? {
ast::Expr::BlockExpr(block) => block,
_ => return None,
}
.stmt_list()
.map(ImportScope::Block)
}
ast::Item::Fn(func) if contains_cfg_attr(&func) => {
// FIXME: Instead of bailing out with None, we should note down that
// this import needs an attribute added
sema.original_ast_node(func)?.body()?.stmt_list().map(ImportScope::Block)
}
ast::Item::Static(statik) if contains_cfg_attr(&statik) => {
// FIXME: Instead of bailing out with None, we should note down that
// this import needs an attribute added
match sema.original_ast_node(statik)?.body()? {
ast::Expr::BlockExpr(block) => block,
_ => return None,
}
.stmt_list()
.map(ImportScope::Block)
}
ast::Item::Module(module) => {
return Some(ImportScope { kind: ImportScopeKind::File(file), required_cfgs });
} else if let Some(module) = ast::Module::cast(syntax.clone()) {
// early return is important here, if we can't find the original module
// in the input there is no way for us to insert an import anywhere.
sema.original_ast_node(module)?.item_list().map(ImportScope::Module)
return sema
.original_ast_node(module)?
.item_list()
.map(ImportScopeKind::Module)
.map(|kind| ImportScope { kind, required_cfgs });
} else if let Some(has_attrs) = ast::AnyHasAttrs::cast(syntax) {
if block.is_none() {
if let Some(b) = ast::BlockExpr::cast(has_attrs.syntax().clone()) {
if let Some(b) = sema.original_ast_node(b) {
block = b.stmt_list();
}
}
}
if has_attrs
.attrs()
.any(|attr| attr.as_simple_call().is_some_and(|(ident, _)| ident == "cfg"))
{
if let Some(b) = block {
return Some(ImportScope {
kind: ImportScopeKind::Block(b),
required_cfgs,
});
}
required_cfgs.extend(has_attrs.attrs().filter(|attr| {
attr.as_simple_call().is_some_and(|(ident, _)| ident == "cfg")
}));
}
_ => continue,
};
}
}
None
}
pub fn as_syntax_node(&self) -> &SyntaxNode {
match self {
ImportScope::File(file) => file.syntax(),
ImportScope::Module(item_list) => item_list.syntax(),
ImportScope::Block(block) => block.syntax(),
match &self.kind {
ImportScopeKind::File(file) => file.syntax(),
ImportScopeKind::Module(item_list) => item_list.syntax(),
ImportScopeKind::Block(block) => block.syntax(),
}
}
pub fn clone_for_update(&self) -> Self {
match self {
ImportScope::File(file) => ImportScope::File(file.clone_for_update()),
ImportScope::Module(item_list) => ImportScope::Module(item_list.clone_for_update()),
ImportScope::Block(block) => ImportScope::Block(block.clone_for_update()),
Self {
kind: match &self.kind {
ImportScopeKind::File(file) => ImportScopeKind::File(file.clone_for_update()),
ImportScopeKind::Module(item_list) => {
ImportScopeKind::Module(item_list.clone_for_update())
}
ImportScopeKind::Block(block) => ImportScopeKind::Block(block.clone_for_update()),
},
required_cfgs: self.required_cfgs.iter().map(|attr| attr.clone_for_update()).collect(),
}
}
}
@@ -216,6 +196,11 @@ fn insert_use_with_alias_option(
use_tree.wrap_in_tree_list();
}
let use_item = make::use_(None, use_tree).clone_for_update();
for attr in
scope.required_cfgs.iter().map(|attr| attr.syntax().clone_subtree().clone_for_update())
{
ted::insert(ted::Position::first_child_of(use_item.syntax()), attr);
}
// merge into existing imports if possible
if let Some(mb) = mb {
@@ -229,7 +214,6 @@ fn insert_use_with_alias_option(
}
}
}
// either we weren't allowed to merge or there is no import that fits the merge conditions
// so look for the place we have to insert to
insert_use_(scope, use_item, cfg.group);
@@ -316,10 +300,10 @@ fn guess_granularity_from_scope(scope: &ImportScope) -> ImportGranularityGuess {
}
_ => None,
};
let mut use_stmts = match scope {
ImportScope::File(f) => f.items(),
ImportScope::Module(m) => m.items(),
ImportScope::Block(b) => b.items(),
let mut use_stmts = match &scope.kind {
ImportScopeKind::File(f) => f.items(),
ImportScopeKind::Module(m) => m.items(),
ImportScopeKind::Block(b) => b.items(),
}
.filter_map(use_stmt);
let mut res = ImportGranularityGuess::Unknown;
@@ -463,12 +447,12 @@ fn insert_use_(scope: &ImportScope, use_item: ast::Use, group_imports: bool) {
}
}
let l_curly = match scope {
ImportScope::File(_) => None,
let l_curly = match &scope.kind {
ImportScopeKind::File(_) => None,
// don't insert the imports before the item list/block expr's opening curly brace
ImportScope::Module(item_list) => item_list.l_curly_token(),
ImportScopeKind::Module(item_list) => item_list.l_curly_token(),
// don't insert the imports before the item list's opening curly brace
ImportScope::Block(block) => block.l_curly_token(),
ImportScopeKind::Block(block) => block.l_curly_token(),
};
// there are no imports in this file at all
// so put the import after all inner module attributes and possible license header comments

View File

@@ -23,7 +23,7 @@ struct Struct;
}
#[test]
fn respects_cfg_attr_fn() {
fn respects_cfg_attr_fn_body() {
check(
r"bar::Bar",
r#"
@@ -40,6 +40,25 @@ fn foo() {
);
}
#[test]
fn respects_cfg_attr_fn_sig() {
check(
r"bar::Bar",
r#"
#[cfg(test)]
fn foo($0) {}
"#,
r#"
#[cfg(test)]
use bar::Bar;
#[cfg(test)]
fn foo() {}
"#,
ImportGranularity::Crate,
);
}
#[test]
fn respects_cfg_attr_const() {
check(
@@ -58,6 +77,51 @@ const FOO: Bar = {
);
}
#[test]
fn respects_cfg_attr_impl() {
check(
r"bar::Bar",
r#"
#[cfg(test)]
impl () {$0}
"#,
r#"
#[cfg(test)]
use bar::Bar;
#[cfg(test)]
impl () {}
"#,
ImportGranularity::Crate,
);
}
#[test]
fn respects_cfg_attr_multiple_layers() {
check(
r"bar::Bar",
r#"
#[cfg(test)]
impl () {
#[cfg(test2)]
fn f($0) {}
}
"#,
r#"
#[cfg(test)]
#[cfg(test2)]
use bar::Bar;
#[cfg(test)]
impl () {
#[cfg(test2)]
fn f() {}
}
"#,
ImportGranularity::Crate,
);
}
#[test]
fn insert_skips_lone_glob_imports() {
check(
@@ -813,7 +877,7 @@ use {std::io};",
}
#[test]
fn merge_groups_skip_attributed() {
fn merge_groups_cfg_vs_no_cfg() {
check_crate(
"std::io",
r#"
@@ -836,6 +900,25 @@ use {std::io};
);
}
#[test]
fn merge_groups_cfg_matching() {
check_crate(
"std::io",
r#"
#[cfg(feature = "gated")] use std::fmt::{Result, Display};
#[cfg(feature = "gated")]
fn f($0) {}
"#,
r#"
#[cfg(feature = "gated")] use std::{fmt::{Display, Result}, io};
#[cfg(feature = "gated")]
fn f() {}
"#,
);
}
#[test]
fn split_out_merge() {
// FIXME: This is suboptimal, we want to get `use std::fmt::{self, Result}`
@@ -1259,12 +1342,14 @@ fn check_with_config(
};
let sema = &Semantics::new(&db);
let source_file = sema.parse(file_id);
let syntax = source_file.syntax().clone_for_update();
let file = pos
.and_then(|pos| syntax.token_at_offset(pos.expect_offset()).next()?.parent())
.and_then(|pos| source_file.syntax().token_at_offset(pos.expect_offset()).next()?.parent())
.and_then(|it| ImportScope::find_insert_use_container(&it, sema))
.or_else(|| ImportScope::from(syntax))
.unwrap();
.unwrap_or_else(|| ImportScope {
kind: ImportScopeKind::File(source_file),
required_cfgs: vec![],
})
.clone_for_update();
let path = ast::SourceFile::parse(&format!("use {path};"), span::Edition::CURRENT)
.tree()
.syntax()
@@ -1349,7 +1434,7 @@ fn check_merge_only_fail(ra_fixture0: &str, ra_fixture1: &str, mb: MergeBehavior
}
fn check_guess(#[rust_analyzer::rust_fixture] ra_fixture: &str, expected: ImportGranularityGuess) {
let syntax = ast::SourceFile::parse(ra_fixture, span::Edition::CURRENT).tree().syntax().clone();
let file = ImportScope::from(syntax).unwrap();
let syntax = ast::SourceFile::parse(ra_fixture, span::Edition::CURRENT).tree();
let file = ImportScope { kind: ImportScopeKind::File(syntax), required_cfgs: vec![] };
assert_eq!(super::guess_granularity_from_scope(&file), expected);
}

View File

@@ -961,12 +961,16 @@ impl<'a> FindUsages<'a> {
// Search for occurrences of the items name
for offset in Self::match_indices(&text, finder, search_range) {
let ret = tree.token_at_offset(offset).any(|token| {
let Some(str_token) = ast::String::cast(token.clone()) else { return false };
if let Some((range, Some(nameres))) =
sema.check_for_format_args_template(token, offset)
if let Some((range, _frange, string_token, Some(nameres))) =
sema.check_for_format_args_template(token.clone(), offset)
{
return self
.found_format_args_ref(file_id, range, str_token, nameres, sink);
return self.found_format_args_ref(
file_id,
range,
string_token,
nameres,
sink,
);
}
false
});

View File

@@ -5,6 +5,7 @@
use std::{collections::hash_map::Entry, fmt, iter, mem};
use crate::imports::insert_use::{ImportScope, ImportScopeKind};
use crate::text_edit::{TextEdit, TextEditBuilder};
use crate::{SnippetCap, assists::Command, syntax_helpers::tree_diff::diff};
use base_db::AnchoredPathBuf;
@@ -367,6 +368,17 @@ impl SourceChangeBuilder {
pub fn make_mut<N: AstNode>(&mut self, node: N) -> N {
self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node)
}
pub fn make_import_scope_mut(&mut self, scope: ImportScope) -> ImportScope {
ImportScope {
kind: match scope.kind.clone() {
ImportScopeKind::File(it) => ImportScopeKind::File(self.make_mut(it)),
ImportScopeKind::Module(it) => ImportScopeKind::Module(self.make_mut(it)),
ImportScopeKind::Block(it) => ImportScopeKind::Block(self.make_mut(it)),
},
required_cfgs: scope.required_cfgs.iter().map(|it| self.make_mut(it.clone())).collect(),
}
}
/// Returns a copy of the `node`, suitable for mutation.
///
/// Syntax trees in rust-analyzer are typically immutable, and mutating

View File

@@ -137,11 +137,7 @@ pub(crate) fn json_in_items(
)
.with_fixes(Some(vec![{
let mut scb = SourceChangeBuilder::new(vfs_file_id);
let scope = match import_scope {
ImportScope::File(it) => ImportScope::File(scb.make_mut(it)),
ImportScope::Module(it) => ImportScope::Module(scb.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(scb.make_mut(it)),
};
let scope = scb.make_import_scope_mut(import_scope);
let current_module = semantics_scope.module();
let cfg = ImportPathConfig {

View File

@@ -67,7 +67,7 @@ pub(crate) fn goto_definition(
});
}
if let Some((range, resolution)) =
if let Some((range, _, _, resolution)) =
sema.check_for_format_args_template(original_token.clone(), offset)
{
return Some(RangeInfo::new(

View File

@@ -53,7 +53,9 @@ pub(crate) fn goto_type_definition(
}
});
};
if let Some((range, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
if let Some((range, _, _, resolution)) =
sema.check_for_format_args_template(token.clone(), offset)
{
if let Some(ty) = resolution.and_then(|res| match Definition::from(res) {
Definition::Const(it) => Some(it.ty(db)),
Definition::Static(it) => Some(it.ty(db)),

View File

@@ -11,7 +11,6 @@ use ide_db::{
preorder_expr_with_ctx_checker,
},
};
use span::FileId;
use syntax::{
AstNode,
SyntaxKind::{self, IDENT, INT_NUMBER},
@@ -61,13 +60,12 @@ pub(crate) fn highlight_related(
let file_id = sema
.attach_first_edition(file_id)
.unwrap_or_else(|| EditionedFileId::current_edition(sema.db, file_id));
let span_file_id = file_id.editioned_file_id(sema.db);
let syntax = sema.parse(file_id).syntax().clone();
let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
T![->] => 4,
kind if kind.is_keyword(span_file_id.edition()) => 3,
kind if kind.is_keyword(file_id.edition(sema.db)) => 3,
IDENT | INT_NUMBER => 2,
T![|] => 1,
_ => 0,
@@ -92,18 +90,11 @@ pub(crate) fn highlight_related(
T![unsafe] if token.parent().and_then(ast::BlockExpr::cast).is_some() => {
highlight_unsafe_points(sema, token).remove(&file_id)
}
T![|] if config.closure_captures => {
highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
T![|] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
T![move] if config.closure_captures => highlight_closure_captures(sema, token, file_id),
_ if config.references => {
highlight_references(sema, token, FilePosition { file_id, offset })
}
T![move] if config.closure_captures => {
highlight_closure_captures(sema, token, file_id, span_file_id.file_id())
}
_ if config.references => highlight_references(
sema,
token,
FilePosition { file_id, offset },
span_file_id.file_id(),
),
_ => None,
}
}
@@ -112,7 +103,6 @@ fn highlight_closure_captures(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
file_id: EditionedFileId,
vfs_file_id: FileId,
) -> Option<Vec<HighlightedRange>> {
let closure = token.parent_ancestors().take(2).find_map(ast::ClosureExpr::cast)?;
let search_range = closure.body()?.syntax().text_range();
@@ -145,7 +135,7 @@ fn highlight_closure_captures(
.sources(sema.db)
.into_iter()
.flat_map(|x| x.to_nav(sema.db))
.filter(|decl| decl.file_id == vfs_file_id)
.filter(|decl| decl.file_id == file_id.file_id(sema.db))
.filter_map(|decl| decl.focus_range)
.map(move |range| HighlightedRange { range, category })
.chain(usages)
@@ -158,9 +148,8 @@ fn highlight_references(
sema: &Semantics<'_, RootDatabase>,
token: SyntaxToken,
FilePosition { file_id, offset }: FilePosition,
vfs_file_id: FileId,
) -> Option<Vec<HighlightedRange>> {
let defs = if let Some((range, resolution)) =
let defs = if let Some((range, _, _, resolution)) =
sema.check_for_format_args_template(token.clone(), offset)
{
match resolution.map(Definition::from) {
@@ -270,7 +259,7 @@ fn highlight_references(
.sources(sema.db)
.into_iter()
.flat_map(|x| x.to_nav(sema.db))
.filter(|decl| decl.file_id == vfs_file_id)
.filter(|decl| decl.file_id == file_id.file_id(sema.db))
.filter_map(|decl| decl.focus_range)
.map(|range| HighlightedRange { range, category })
.for_each(|x| {
@@ -288,7 +277,7 @@ fn highlight_references(
},
};
for nav in navs {
if nav.file_id != vfs_file_id {
if nav.file_id != file_id.file_id(sema.db) {
continue;
}
let hl_range = nav.focus_range.map(|range| {

View File

@@ -200,7 +200,7 @@ fn hover_offset(
});
}
if let Some((range, resolution)) =
if let Some((range, _, _, resolution)) =
sema.check_for_format_args_template(original_token.clone(), offset)
{
let res = hover_for_definition(

View File

@@ -514,7 +514,6 @@ impl Analysis {
self.with_db(|db| goto_type_definition::goto_type_definition(db, position))
}
/// Finds all usages of the reference at point.
pub fn find_all_refs(
&self,
position: FilePosition,

View File

@@ -8,6 +8,14 @@
//! for text occurrences of the identifier. If there's an `ast::NameRef`
//! at the index that the match starts at and its tree parent is
//! resolved to the search element definition, we get a reference.
//!
//! Special handling for constructors/initializations:
//! When searching for references to a struct/enum/variant, if the cursor is positioned on:
//! - `{` after a struct/enum/variant definition
//! - `(` for tuple structs/variants
//! - `;` for unit structs
//! - The type name in a struct/enum/variant definition
//! Then only constructor/initialization usages will be shown, filtering out other references.
use hir::{PathResolution, Semantics};
use ide_db::{
@@ -28,27 +36,76 @@ use syntax::{
use crate::{FilePosition, HighlightedRange, NavigationTarget, TryToNav, highlight_related};
/// Result of a reference search operation.
#[derive(Debug, Clone)]
pub struct ReferenceSearchResult {
/// Information about the declaration site of the searched item.
/// For ADTs (structs/enums), this points to the type definition.
/// May be None for primitives or items without clear declaration sites.
pub declaration: Option<Declaration>,
/// All references found, grouped by file.
/// For ADTs when searching from a constructor position (e.g. on '{', '(', ';'),
/// this only includes constructor/initialization usages.
/// The map key is the file ID, and the value is a vector of (range, category) pairs.
/// - range: The text range of the reference in the file
/// - category: Metadata about how the reference is used (read/write/etc)
pub references: IntMap<FileId, Vec<(TextRange, ReferenceCategory)>>,
}
/// Information about the declaration site of a searched item.
#[derive(Debug, Clone)]
pub struct Declaration {
/// Navigation information to jump to the declaration
pub nav: NavigationTarget,
/// Whether the declared item is mutable (relevant for variables)
pub is_mut: bool,
}
// Feature: Find All References
//
// Shows all references of the item at the cursor location
// Shows all references of the item at the cursor location. This includes:
// - Direct references to variables, functions, types, etc.
// - Constructor/initialization references when cursor is on struct/enum definition tokens
// - References in patterns and type contexts
// - References through dereferencing and borrowing
// - References in macro expansions
//
// Special handling for constructors:
// - When the cursor is on `{`, `(`, or `;` in a struct/enum definition
// - When the cursor is on the type name in a struct/enum definition
// These cases will show only constructor/initialization usages of the type
//
// | Editor | Shortcut |
// |---------|----------|
// | VS Code | <kbd>Shift+Alt+F12</kbd> |
//
// ![Find All References](https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif)
/// Find all references to the item at the given position.
///
/// # Arguments
/// * `sema` - Semantic analysis context
/// * `position` - Position in the file where to look for the item
/// * `search_scope` - Optional scope to limit the search (e.g. current crate only)
///
/// # Returns
/// Returns `None` if no valid item is found at the position.
/// Otherwise returns a vector of `ReferenceSearchResult`, usually with one element.
/// Multiple results can occur in case of ambiguity or when searching for trait items.
///
/// # Special cases
/// - Control flow keywords (break, continue, etc): Shows all related jump points
/// - Constructor search: When on struct/enum definition tokens (`{`, `(`, `;`), shows only initialization sites
/// - Format string arguments: Shows template parameter usages
/// - Lifetime parameters: Shows lifetime constraint usages
///
/// # Constructor search
/// When the cursor is on specific tokens in a struct/enum definition:
/// - `{` after struct/enum/variant: Shows record literal initializations
/// - `(` after tuple struct/variant: Shows tuple literal initializations
/// - `;` after unit struct: Shows unit literal initializations
/// - Type name in definition: Shows all initialization usages
/// In these cases, other kinds of references (like type references) are filtered out.
pub(crate) fn find_all_refs(
sema: &Semantics<'_, RootDatabase>,
position: FilePosition,
@@ -143,7 +200,7 @@ pub(crate) fn find_defs(
)
})?;
if let Some((_, resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
if let Some((.., resolution)) = sema.check_for_format_args_template(token.clone(), offset) {
return resolution.map(Definition::from).map(|it| vec![it]);
}
@@ -219,7 +276,19 @@ fn retain_adt_literal_usages(
}
}
/// Returns `Some` if the cursor is at a position for an item to search for all its constructor/literal usages
/// Returns `Some` if the cursor is at a position where we should search for constructor/initialization usages.
/// This is used to implement the special constructor search behavior when the cursor is on specific tokens
/// in a struct/enum/variant definition.
///
/// # Returns
/// - `Some(name)` if the cursor is on:
/// - `{` after a struct/enum/variant definition
/// - `(` for tuple structs/variants
/// - `;` for unit structs
/// - The type name in a struct/enum/variant definition
/// - `None` otherwise
///
/// The returned name is the name of the type whose constructor usages should be searched for.
fn name_for_constructor_search(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
let token = syntax.token_at_offset(position.offset).right_biased()?;
let token_parent = token.parent()?;
@@ -257,6 +326,16 @@ fn name_for_constructor_search(syntax: &SyntaxNode, position: FilePosition) -> O
}
}
/// Checks if a name reference is part of an enum variant literal expression.
/// Used to filter references when searching for enum variant constructors.
///
/// # Arguments
/// * `sema` - Semantic analysis context
/// * `enum_` - The enum type to check against
/// * `name_ref` - The name reference to check
///
/// # Returns
/// `true` if the name reference is used as part of constructing a variant of the given enum.
fn is_enum_lit_name_ref(
sema: &Semantics<'_, RootDatabase>,
enum_: hir::Enum,
@@ -284,12 +363,19 @@ fn is_enum_lit_name_ref(
.unwrap_or(false)
}
/// Checks if a path ends with the given name reference.
/// Helper function for checking constructor usage patterns.
fn path_ends_with(path: Option<ast::Path>, name_ref: &ast::NameRef) -> bool {
path.and_then(|path| path.segment())
.and_then(|segment| segment.name_ref())
.map_or(false, |segment| segment == *name_ref)
}
/// Checks if a name reference is used in a literal (constructor) context.
/// Used to filter references when searching for struct/variant constructors.
///
/// # Returns
/// `true` if the name reference is used as part of a struct/variant literal expression.
fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool {
name_ref.syntax().ancestors().find_map(|ancestor| {
match_ast! {

View File

@@ -203,7 +203,7 @@ fn find_definitions(
) -> RenameResult<impl Iterator<Item = (FileRange, SyntaxKind, Definition)>> {
let token = syntax.token_at_offset(offset).find(|t| matches!(t.kind(), SyntaxKind::STRING));
if let Some((range, Some(resolution))) =
if let Some((range, _, _, Some(resolution))) =
token.and_then(|token| sema.check_for_format_args_template(token, offset))
{
return Ok(vec![(

View File

@@ -542,7 +542,7 @@ fn descend_token(
let mut t = None;
let mut r = 0;
sema.descend_into_macros_breakable(token.clone(), |tok, _ctx| {
sema.descend_into_macros_breakable(token.clone().into(), |tok, _ctx| {
// FIXME: Consider checking ctx transparency for being opaque?
let my_rank = ranker.rank_token(&tok.value);

View File

@@ -74,7 +74,8 @@ fn check_(
"{}",
syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(
node.syntax_node(),
&mut |it| it.clone()
&mut |_| None,
|_| ()
)
);
expect.assert_eq(&expect_res);

View File

@@ -6,7 +6,7 @@ use std::{
use ide_db::base_db::{
DbPanicContext,
salsa::{self, Cancelled},
salsa::{self, Cancelled, UnexpectedCycle},
};
use lsp_server::{ExtractError, Response, ResponseError};
use serde::{Serialize, de::DeserializeOwned};
@@ -349,11 +349,14 @@ where
let mut message = "request handler panicked".to_owned();
if let Some(panic_message) = panic_message {
message.push_str(": ");
message.push_str(panic_message)
message.push_str(panic_message);
} else if let Some(cycle) = panic.downcast_ref::<UnexpectedCycle>() {
tracing::error!("{cycle}");
message.push_str(": unexpected cycle");
} else if let Ok(cancelled) = panic.downcast::<Cancelled>() {
tracing::error!("Cancellation propagated out of salsa! This is a bug");
return Err(HandlerCancelledError::Inner(*cancelled));
}
};
Ok(lsp_server::Response::new_err(
id,

View File

@@ -103,6 +103,7 @@ pub(crate) fn file_range_uri(
pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> {
let assist_kind = match &kind {
k if k == &lsp_types::CodeActionKind::EMPTY => AssistKind::Generate,
k if k == &lsp_types::CodeActionKind::QUICKFIX => AssistKind::QuickFix,
k if k == &lsp_types::CodeActionKind::REFACTOR => AssistKind::Refactor,
k if k == &lsp_types::CodeActionKind::REFACTOR_EXTRACT => AssistKind::RefactorExtract,

View File

@@ -112,7 +112,10 @@ pub struct EditionedFileId(u32);
impl fmt::Debug for EditionedFileId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("EditionedFileId").field(&self.file_id()).field(&self.edition()).finish()
f.debug_tuple("EditionedFileId")
.field(&self.file_id().index())
.field(&self.edition())
.finish()
}
}

View File

@@ -7,6 +7,13 @@ use syntax::{
ted::{self, Position},
};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PrettifyWsKind {
Space,
Indent(usize),
Newline,
}
/// Renders a [`SyntaxNode`] with whitespace inserted between tokens that require them.
///
/// This is an internal API that is only exported because `mbe` needs it for tests and cannot depend
@@ -15,7 +22,8 @@ use syntax::{
#[deprecated = "use `hir_expand::prettify_macro_expansion()` instead"]
pub fn prettify_macro_expansion(
syn: SyntaxNode,
dollar_crate_replacement: &mut dyn FnMut(&SyntaxToken) -> SyntaxToken,
dollar_crate_replacement: &mut dyn FnMut(&SyntaxToken) -> Option<SyntaxToken>,
inspect_mods: impl FnOnce(&[(Position, PrettifyWsKind)]),
) -> SyntaxNode {
let mut indent = 0;
let mut last: Option<SyntaxKind> = None;
@@ -27,14 +35,12 @@ pub fn prettify_macro_expansion(
let after = Position::after;
let do_indent = |pos: fn(_) -> Position, token: &SyntaxToken, indent| {
(pos(token.clone()), make::tokens::whitespace(&" ".repeat(4 * indent)))
};
let do_ws = |pos: fn(_) -> Position, token: &SyntaxToken| {
(pos(token.clone()), make::tokens::single_space())
};
let do_nl = |pos: fn(_) -> Position, token: &SyntaxToken| {
(pos(token.clone()), make::tokens::single_newline())
(pos(token.clone()), PrettifyWsKind::Indent(indent))
};
let do_ws =
|pos: fn(_) -> Position, token: &SyntaxToken| (pos(token.clone()), PrettifyWsKind::Space);
let do_nl =
|pos: fn(_) -> Position, token: &SyntaxToken| (pos(token.clone()), PrettifyWsKind::Newline);
for event in syn.preorder_with_tokens() {
let token = match event {
@@ -46,20 +52,19 @@ pub fn prettify_macro_expansion(
) =>
{
if indent > 0 {
mods.push((
Position::after(node.clone()),
make::tokens::whitespace(&" ".repeat(4 * indent)),
));
mods.push((Position::after(node.clone()), PrettifyWsKind::Indent(indent)));
}
if node.parent().is_some() {
mods.push((Position::after(node), make::tokens::single_newline()));
mods.push((Position::after(node), PrettifyWsKind::Newline));
}
continue;
}
_ => continue,
};
if token.kind() == SyntaxKind::IDENT && token.text() == "$crate" {
dollar_crate_replacements.push((token.clone(), dollar_crate_replacement(&token)));
if let Some(replacement) = dollar_crate_replacement(&token) {
dollar_crate_replacements.push((token.clone(), replacement));
}
}
let tok = &token;
@@ -129,8 +134,16 @@ pub fn prettify_macro_expansion(
last = Some(tok.kind());
}
inspect_mods(&mods);
for (pos, insert) in mods {
ted::insert(pos, insert);
ted::insert_raw(
pos,
match insert {
PrettifyWsKind::Space => make::tokens::single_space(),
PrettifyWsKind::Indent(indent) => make::tokens::whitespace(&" ".repeat(4 * indent)),
PrettifyWsKind::Newline => make::tokens::single_newline(),
},
);
}
for (old, new) in dollar_crate_replacements {
ted::replace(old, new);

View File

@@ -5,6 +5,7 @@
use std::{mem, ops::RangeInclusive};
use parser::T;
use rowan::TextSize;
use crate::{
SyntaxElement, SyntaxKind, SyntaxNode, SyntaxToken,
@@ -74,6 +75,12 @@ impl Position {
};
Position { repr }
}
pub fn offset(&self) -> TextSize {
match &self.repr {
PositionRepr::FirstChild(node) => node.text_range().start(),
PositionRepr::After(elem) => elem.text_range().end(),
}
}
}
pub fn insert(position: Position, elem: impl Element) {
@@ -207,5 +214,12 @@ fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option<SyntaxToken
}
return Some(make::tokens::whitespace(&format!("\n{indent}")));
}
if left.kind() == SyntaxKind::ATTR {
let mut indent = IndentLevel::from_element(right);
if right.kind() == SyntaxKind::ATTR {
indent.0 = IndentLevel::from_element(left).0.max(indent.0);
}
return Some(make::tokens::whitespace(&format!("\n{indent}")));
}
Some(make::tokens::single_space())
}

View File

@@ -39,6 +39,13 @@ impl VfsPath {
}
}
pub fn into_abs_path(self) -> Option<AbsPathBuf> {
match self.0 {
VfsPathRepr::PathBuf(it) => Some(it),
VfsPathRepr::VirtualPath(_) => None,
}
}
/// Creates a new `VfsPath` with `path` adjoined to `self`.
pub fn join(&self, path: &str) -> Option<VfsPath> {
match &self.0 {

View File

@@ -24,8 +24,8 @@ impl flags::Codegen {
grammar::generate(self.check);
assists_doc_tests::generate(self.check);
parser_inline_tests::generate(self.check);
feature_docs::generate(self.check)
// diagnostics_docs::generate(self.check) doesn't generate any tests
feature_docs::generate(self.check);
diagnostics_docs::generate(self.check);
// lints::generate(self.check) Updating clones the rust repo, so don't run it unless
// explicitly asked for
}

View File

@@ -74,13 +74,13 @@ pub(crate) const WORKSPACES: &[(&str, ExceptionList, Option<(&[&str], &[&str])>,
("compiler/rustc_codegen_gcc", EXCEPTIONS_GCC, None, &[]),
("src/bootstrap", EXCEPTIONS_BOOTSTRAP, None, &[]),
("src/ci/docker/host-x86_64/test-various/uefi_qemu_test", EXCEPTIONS_UEFI_QEMU_TEST, None, &[]),
("src/etc/test-float-parse", EXCEPTIONS, None, &[]),
("src/tools/cargo", EXCEPTIONS_CARGO, None, &["src/tools/cargo"]),
//("src/tools/miri/test-cargo-miri", &[], None), // FIXME uncomment once all deps are vendored
//("src/tools/miri/test_dependencies", &[], None), // FIXME uncomment once all deps are vendored
("src/tools/rust-analyzer", EXCEPTIONS_RUST_ANALYZER, None, &[]),
("src/tools/rustbook", EXCEPTIONS_RUSTBOOK, None, &["src/doc/book", "src/doc/reference"]),
("src/tools/rustc-perf", EXCEPTIONS_RUSTC_PERF, None, &["src/tools/rustc-perf"]),
("src/tools/test-float-parse", EXCEPTIONS, None, &[]),
// tidy-alphabetical-end
];

View File

@@ -1441,3 +1441,8 @@ days-threshold = 14
# Prevents mentions in commits to avoid users being spammed
# Documentation at: https://forge.rust-lang.org/triagebot/no-mentions.html
[no-mentions]
# Allow members to formally register concerns (`@rustbot concern my concern`)
# Documentation at: https://forge.rust-lang.org/triagebot/concern.html
[concern]
labels = ["S-waiting-on-concerns"]