Disallow shebang in --cfg and --check-cfg arguments

Urgau
2025-09-03 18:52:07 +02:00
parent 99317ef14d
commit d224d3a8fa
7 changed files with 75 additions and 29 deletions

View File

@@ -44,19 +44,44 @@ pub(crate) struct UnmatchedDelim {
     pub candidate_span: Option<Span>,
 }
 
+/// Which tokens should be stripped before lexing the tokens.
+pub(crate) enum StripTokens {
+    /// Strip both shebang and frontmatter.
+    ShebangAndFrontmatter,
+    /// Strip the shebang but not frontmatter.
+    ///
+    /// That means that char sequences looking like frontmatter are simply
+    /// interpreted as regular Rust lexemes.
+    Shebang,
+    /// Strip nothing.
+    ///
+    /// In other words, char sequences looking like a shebang or frontmatter
+    /// are simply interpreted as regular Rust lexemes.
+    Nothing,
+}
+
 pub(crate) fn lex_token_trees<'psess, 'src>(
     psess: &'psess ParseSess,
     mut src: &'src str,
     mut start_pos: BytePos,
     override_span: Option<Span>,
-    frontmatter_allowed: FrontmatterAllowed,
+    strip_tokens: StripTokens,
 ) -> Result<TokenStream, Vec<Diag<'psess>>> {
-    // Skip `#!`, if present.
-    if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
-        src = &src[shebang_len..];
-        start_pos = start_pos + BytePos::from_usize(shebang_len);
+    match strip_tokens {
+        StripTokens::Shebang | StripTokens::ShebangAndFrontmatter => {
+            if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
+                src = &src[shebang_len..];
+                start_pos = start_pos + BytePos::from_usize(shebang_len);
+            }
+        }
+        StripTokens::Nothing => {}
     }
 
+    let frontmatter_allowed = match strip_tokens {
+        StripTokens::ShebangAndFrontmatter => FrontmatterAllowed::Yes,
+        StripTokens::Shebang | StripTokens::Nothing => FrontmatterAllowed::No,
+    };
+
     let cursor = Cursor::new(src, frontmatter_allowed);
     let mut lexer = Lexer {
         psess,
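
For orientation, here is a small self-contained sketch (plain Rust, not rustc code; the variant meanings are taken from the enum above) of the decision table that `StripTokens` encodes, i.e. which leading constructs are removed before lexing:

    // Assumed summary of the `StripTokens` decision table, not the actual lexer.
    #[derive(Clone, Copy)]
    enum StripTokens {
        ShebangAndFrontmatter, // regular source files
        Shebang,               // shebang removed, frontmatter lexed as ordinary tokens
        Nothing,               // e.g. `--cfg` / `--check-cfg` values
    }

    fn strips_shebang(mode: StripTokens) -> bool {
        matches!(mode, StripTokens::Shebang | StripTokens::ShebangAndFrontmatter)
    }

    fn allows_frontmatter(mode: StripTokens) -> bool {
        matches!(mode, StripTokens::ShebangAndFrontmatter)
    }

    fn main() {
        // With `StripTokens::Nothing`, a leading `#!/usr/bin/shebang` is lexed
        // literally and later rejected, which is the point of this commit.
        assert!(!strips_shebang(StripTokens::Nothing));
        assert!(!allows_frontmatter(StripTokens::Shebang));
        assert!(strips_shebang(StripTokens::ShebangAndFrontmatter));
    }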

View File

@@ -21,7 +21,6 @@ use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{AttrItem, Attribute, MetaItemInner, token};
 use rustc_ast_pretty::pprust;
 use rustc_errors::{Diag, EmissionGuarantee, FatalError, PResult, pluralize};
-use rustc_lexer::FrontmatterAllowed;
 use rustc_session::parse::ParseSess;
 use rustc_span::source_map::SourceMap;
 use rustc_span::{FileName, SourceFile, Span};
@@ -34,6 +33,8 @@ pub mod parser;
 use parser::Parser;
 use rustc_ast::token::Delimiter;
 
+use crate::lexer::StripTokens;
+
 pub mod lexer;
 mod errors;
@@ -62,10 +63,10 @@ pub fn new_parser_from_source_str(
     source: String,
 ) -> Result<Parser<'_>, Vec<Diag<'_>>> {
     let source_file = psess.source_map().new_source_file(name, source);
-    new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
+    new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
 }
 
-/// Creates a new parser from a simple (no frontmatter) source string.
+/// Creates a new parser from a simple (no shebang, no frontmatter) source string.
 ///
 /// On failure, the errors must be consumed via `unwrap_or_emit_fatal`, `emit`, `cancel`,
 /// etc., otherwise a panic will occur when they are dropped.
@@ -75,7 +76,7 @@ pub fn new_parser_from_simple_source_str(
     source: String,
 ) -> Result<Parser<'_>, Vec<Diag<'_>>> {
     let source_file = psess.source_map().new_source_file(name, source);
-    new_parser_from_source_file(psess, source_file, FrontmatterAllowed::No)
+    new_parser_from_source_file(psess, source_file, StripTokens::Nothing)
 }
 
 /// Creates a new parser from a filename. On failure, the errors must be consumed via
@@ -109,7 +110,7 @@ pub fn new_parser_from_file<'a>(
         }
         err.emit();
     });
-    new_parser_from_source_file(psess, source_file, FrontmatterAllowed::Yes)
+    new_parser_from_source_file(psess, source_file, StripTokens::ShebangAndFrontmatter)
 }
 
 pub fn utf8_error<E: EmissionGuarantee>(
@@ -160,10 +161,10 @@ pub fn utf8_error<E: EmissionGuarantee>(
 fn new_parser_from_source_file(
     psess: &ParseSess,
     source_file: Arc<SourceFile>,
-    frontmatter_allowed: FrontmatterAllowed,
+    strip_tokens: StripTokens,
 ) -> Result<Parser<'_>, Vec<Diag<'_>>> {
     let end_pos = source_file.end_position();
-    let stream = source_file_to_stream(psess, source_file, None, frontmatter_allowed)?;
+    let stream = source_file_to_stream(psess, source_file, None, strip_tokens)?;
     let mut parser = Parser::new(psess, stream, None);
     if parser.token == token::Eof {
         parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt(), None);
@@ -179,8 +180,8 @@ pub fn source_str_to_stream(
 ) -> Result<TokenStream, Vec<Diag<'_>>> {
     let source_file = psess.source_map().new_source_file(name, source);
     // used mainly for `proc_macro` and the likes, not for our parsing purposes, so don't parse
-    // frontmatters as frontmatters.
-    source_file_to_stream(psess, source_file, override_span, FrontmatterAllowed::No)
+    // frontmatters as frontmatters, but for compatibility reason still strip the shebang
+    source_file_to_stream(psess, source_file, override_span, StripTokens::Shebang)
 }
 
 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
@@ -189,7 +190,7 @@ fn source_file_to_stream<'psess>(
     psess: &'psess ParseSess,
     source_file: Arc<SourceFile>,
     override_span: Option<Span>,
-    frontmatter_allowed: FrontmatterAllowed,
+    strip_tokens: StripTokens,
 ) -> Result<TokenStream, Vec<Diag<'psess>>> {
     let src = source_file.src.as_ref().unwrap_or_else(|| {
         psess.dcx().bug(format!(
@@ -198,13 +199,7 @@ fn source_file_to_stream<'psess>(
         ));
     });
 
-    lexer::lex_token_trees(
-        psess,
-        src.as_str(),
-        source_file.start_pos,
-        override_span,
-        frontmatter_allowed,
-    )
+    lexer::lex_token_trees(psess, src.as_str(), source_file.start_pos, override_span, strip_tokens)
 }
 
 /// Runs the given subparser `f` on the tokens of the given `attr`'s item.
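
Taken together, the hunks above make every entry point in this file spell out its stripping mode: regular files (`new_parser_from_source_str`, `new_parser_from_file`) strip both shebang and frontmatter, `source_str_to_stream` keeps stripping only the shebang for proc-macro compatibility, and the "simple" constructor strips nothing. A hedged sketch of the intended caller for that last mode follows; the helper name is hypothetical and the real CLI plumbing lives outside this diff:

    use rustc_parse::{new_parser_from_simple_source_str, parser::Parser};
    use rustc_session::parse::ParseSess;
    use rustc_span::FileName;

    /// Hypothetical helper: parse a `--cfg`/`--check-cfg` value. The "simple"
    /// constructor maps to `StripTokens::Nothing`, so a leading `#!...` or
    /// `---` is lexed literally and reported as an invalid argument instead
    /// of being silently stripped.
    fn parse_cli_cfg_spec<'a>(
        psess: &'a ParseSess,
        spec: String,
    ) -> Result<Parser<'a>, Vec<rustc_errors::Diag<'a>>> {
        new_parser_from_simple_source_str(psess, FileName::cfg_spec_source_code(&spec), spec)
    }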

View File

@@ -0,0 +1,3 @@
+error: invalid `--cfg` argument: `#!/usr/bin/shebang
+key` (expected `key` or `key="value"`)
+

View File

@@ -0,0 +1,6 @@
+error: invalid `--check-cfg` argument: `#!/usr/bin/shebang
+cfg(key)`
+  |
+  = note: expected `cfg(name, values("value1", "value2", ... "valueN"))`
+  = note: visit <https://doc.rust-lang.org/nightly/rustc/check-cfg.html> for more details
+

View File

@@ -1,15 +1,14 @@
 use run_make_support::{cwd, diff, rustc};
 
-fn test_and_compare(flag: &str, val: &str) {
+fn test_and_compare(test_name: &str, flag: &str, val: &str) {
     let mut cmd = rustc();
-    let output =
-        cmd.input("").arg("--crate-type=lib").arg(&format!("--{flag}")).arg(val).run_fail();
+    let output = cmd.input("").arg("--crate-type=lib").arg(flag).arg(val).run_fail();
 
     assert_eq!(output.stdout_utf8(), "");
 
     diff()
-        .expected_file(format!("{flag}.stderr"))
-        .actual_text("output", output.stderr_utf8())
+        .expected_file(format!("{test_name}.stderr"))
+        .actual_text("stderr", output.stderr_utf8())
         .run();
 }
@@ -17,7 +16,8 @@ fn main() {
     // Verify that frontmatter isn't allowed in `--cfg` arguments.
     // https://github.com/rust-lang/rust/issues/146130
     test_and_compare(
-        "cfg",
+        "cfg-frontmatter",
+        "--cfg",
         r#"---
 ---
 key"#,
@@ -26,9 +26,26 @@ key"#,
     // Verify that frontmatter isn't allowed in `--check-cfg` arguments.
     // https://github.com/rust-lang/rust/issues/146130
     test_and_compare(
-        "check-cfg",
+        "check-cfg-frontmatter",
+        "--check-cfg",
         r#"---
 ---
+cfg(key)"#,
+    );
+
+    // Verify that shebang isn't allowed in `--cfg` arguments.
+    test_and_compare(
+        "cfg-shebang",
+        "--cfg",
+        r#"#!/usr/bin/shebang
+key"#,
+    );
+
+    // Verify that shebang isn't allowed in `--check-cfg` arguments.
+    test_and_compare(
+        "check-cfg-shebang",
+        "--check-cfg",
+        r#"#!/usr/bin/shebang
 cfg(key)"#,
     );
 }
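
For reference, the two new shebang cases boil down to invocations roughly like the following (a sketch; the input and working directory are set up by the run-make harness, which then diffs stderr against the expected files):

    rustc --crate-type=lib --cfg '#!/usr/bin/shebang
    key'

    rustc --crate-type=lib --check-cfg '#!/usr/bin/shebang
    cfg(key)'

Both are expected to fail with exactly the stderr recorded in `cfg-shebang.stderr` and `check-cfg-shebang.stderr` above.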