Disallow shebang in --cfg and --check-cfg arguments

Urgau
2025-09-03 18:52:07 +02:00
parent 99317ef14d
commit d224d3a8fa
7 changed files with 75 additions and 29 deletions

compiler/rustc_parse/src/lib.rs

@@ -21,7 +21,6 @@ use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
use rustc_ast_pretty::pprust;
use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
-use rustc_lexer::FrontmatterAllowed;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::SourceMap;
use rustc_span::{FileName, SourceFile, Span};
@@ -34,6 +33,8 @@ pub mod parser;
use parser::Parser;
use rustc_ast::token::Delimiter;
+use crate::lexer::StripTokens;
pub mod lexer;
mod errors;
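
The StripTokens type itself is defined in the lexer module and its definition is not part of this file's diff. Based on the variants used below (Nothing, Shebang, ShebangAndFrontmatter), a minimal sketch of its likely shape, as an assumption rather than the exact definition:

// Sketch only; the real definition lives in rustc_parse's lexer module.
// Selects which leading constructs the lexer strips before tokenizing.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum StripTokens {
    /// Strip nothing: a leading `#!` line or `---` frontmatter is lexed as
    /// ordinary tokens and rejected wherever it is not valid syntax.
    Nothing,
    /// Strip only a leading shebang line.
    Shebang,
    /// Strip a leading shebang and any frontmatter that follows it.
    ShebangAndFrontmatter,
}
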
@@ -62,10 +63,10 @@ pub fn new_parser_from_source_str(
source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
-new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
+new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
}
-/// Creates a new parser from a simple (no frontmatter) source string.
+/// Creates a new parser from a simple (no shebang, no frontmatter) source string.
///
/// On failure, the errors must be consumed via `unwrap_or_emit_fatal`, `emit`, `cancel`,
/// etc., otherwise a panic will occur when they are dropped.
@@ -75,7 +76,7 @@ pub fn new_parser_from_simple_source_str(
source: String,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
-new_parser_from_source_file(psess, source_file, FrontmatterAllowed::No)
+new_parser_from_source_file(psess, source_file, StripTokens::Nothing)
}
/// Creates a new parser from a filename. On failure, the errors must be consumed via
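
The split between the two constructors above is the substance of the change: new_parser_from_source_str keeps stripping a shebang and frontmatter, while the "simple" variant now strips nothing at all. Below is a hypothetical caller sketch, assuming, as the commit title suggests, that the --cfg and --check-cfg handling elsewhere in this commit goes through the simple path; psess, filename, and cfg_spec are placeholder locals, not names from the diff:

// With StripTokens::Nothing a leading `#!` in a cfg spec is lexed as plain
// `#` and `!` tokens, so a shebang-like --cfg value becomes a parse error
// instead of having its first line stripped as a shebang.
match new_parser_from_simple_source_str(psess, filename, cfg_spec.to_string()) {
    Ok(parser) => {
        // parse the expected `key` or `key = "value"` meta item from `parser`
    }
    Err(errs) => {
        // the diagnostics in `errs` must be consumed, e.g. emitted or cancelled
    }
}
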
@@ -109,7 +110,7 @@ pub fn new_parser_from_file<'a>(
}
err.emit();
});
-new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
+new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
}
pub fn utf8_error<E: EmissionGuarantee>(
@@ -160,10 +161,10 @@ pub fn utf8_error<E: EmissionGuarantee>(
fn new_parser_from_source_file(
psess: &ParseSess,
source_file: Arc<SourceFile>,
-frontmatter_allowed: FrontmatterAllowed,
+strip_tokens: StripTokens,
) -> Result<Parser<'_>, Vec<Diag<'_>>> {
let end_pos = source_file.end_position();
-let stream = source_file_to_stream(psess, source_file, None, frontmatter_allowed)?;
+let stream = source_file_to_stream(psess, source_file, None, strip_tokens)?;
let mut parser = Parser::new(psess, stream, None);
if parser.token == token::Eof {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
@@ -179,8 +180,8 @@ pub fn source_str_to_stream(
) -> Result<TokenStream, Vec<Diag<'_>>> {
let source_file = psess.source_map().new_source_file(name, source);
// used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse
-// frontmatters as frontmatters.
-source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No)
+// frontmatters as frontmatters, but for compatibility reasons still strip the shebang
+source_file_to_stream(psess, source_file, override_span, StripTokens::Shebang)
}
/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
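
For plain string-to-stream conversion the compatibility choice is the middle variant: the shebang is still stripped as before, but frontmatter is never treated specially. A rough illustration, not taken from the diff:

// Input handed to source_str_to_stream:
//
//   "#!/usr/bin/env run-cargo-script\nfn main() {}"
//    ^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ dropped by StripTokens::Shebang
//
// The rest is tokenized normally: `fn`, `main`, `(`, `)`, `{`, `}`.
// A leading `---` frontmatter block would not be stripped on this path.
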
@@ -189,7 +190,7 @@ fn source_file_to_stream<'psess>(
psess: &'psess ParseSess,
source_file: Arc<SourceFile>,
override_span: Option<Span>,
frontmatter_allowed: FrontmatterAllowed,
strip_tokens: StripTokens,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
let src = source_file.src.as_ref().unwrap_or_else(|| {
psess.dcx().bug(format!(
@@ -198,13 +199,7 @@ fn source_file_to_stream<'psess>(
));
});
-lexer::lex_token_trees(
-psess,
-src.as_str(),
-source_file.start_pos,
-override_span,
-frontmatter_allowed,
-)
+lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span, strip_tokens)
}
/// Runs the given subparser `f` on the tokens of the given `attr`'s item.