Revert to f6f7c5

Author: DJMcNab
Date: 2018-12-19 18:33:36 +00:00
Parent: 7a8560ba38
Commit: cd8e33fb7e
3 changed files with 22 additions and 36 deletions


@@ -158,19 +158,18 @@ fn current_op(p: &Parser) -> (u8, Op) {
 // Parses expression with binding power of at least bp.
 fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> BlockLike {
     let mut lhs = match lhs(p, r) {
-        (Some(lhs), macro_blocklike) => {
+        Some(lhs) => {
             // test stmt_bin_expr_ambiguity
             // fn foo() {
             //     let _ = {1} & 2;
             //     {1} &2;
             // }
-            if r.prefer_stmt && (is_block(lhs.kind()) || macro_blocklike == Some(BlockLike::Block))
-            {
+            if r.prefer_stmt && is_block(lhs.kind()) {
                 return BlockLike::Block;
             }
             lhs
         }
-        (None, _) => return BlockLike::NotBlock,
+        None => return BlockLike::NotBlock,
     };

     loop {
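
Aside (not part of the diff): expr_bp is the usual precedence-climbing loop, driven by a minimum binding power bp. As orientation, here is a minimal, self-contained sketch of that technique in plain Rust. The Tok type, the binding powers, and the fact that it evaluates to an i64 instead of building syntax nodes are all illustrative, not this crate's API.

// Minimal precedence-climbing sketch (illustrative only; the real parser
// works on a Parser with markers and Restrictions, not on plain enums).
enum Tok { Num(i64), Plus, Star, Eof }

fn expr_bp(tokens: &mut Vec<Tok>, min_bp: u8) -> i64 {
    // `lhs` plays the role of the completed marker in the real code.
    let mut lhs = match tokens.pop() {
        Some(Tok::Num(n)) => n,
        _ => panic!("expected an atom"),
    };
    loop {
        // Each operator has a binding power; stop once it drops below min_bp.
        let (bp, op) = match tokens.last() {
            Some(Tok::Plus) => (1, Tok::Plus),
            Some(Tok::Star) => (2, Tok::Star),
            _ => break,
        };
        if bp < min_bp {
            break;
        }
        tokens.pop();
        // Parse the right-hand side with a higher minimum binding power
        // (bp + 1 makes the operators left-associative).
        let rhs = expr_bp(tokens, bp + 1);
        lhs = match op {
            Tok::Plus => lhs + rhs,
            Tok::Star => lhs * rhs,
            _ => unreachable!(),
        };
    }
    lhs
}

fn main() {
    // Tokens for `2 + 3 * 4`, stored in reverse so `pop` yields them in order.
    let mut toks = vec![Tok::Eof, Tok::Num(4), Tok::Star, Tok::Num(3), Tok::Plus, Tok::Num(2)];
    assert_eq!(expr_bp(&mut toks, 0), 14);
}

The function in the diff additionally reports a BlockLike value, so statement-level callers can tell whether the parsed expression ended in a block and may stand without a trailing semicolon; the reverted macro_blocklike plumbing tried to extend that to block-like macro calls.
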
@@ -214,7 +213,7 @@ const LHS_FIRST: TokenSet = token_set_union![
     atom::ATOM_EXPR_FIRST,
 ];

-fn lhs(p: &mut Parser, r: Restrictions) -> (Option<CompletedMarker>, Option<BlockLike>) {
+fn lhs(p: &mut Parser, r: Restrictions) -> Option<CompletedMarker> {
     let m;
     let kind = match p.current() {
         // test ref_expr
@@ -247,29 +246,18 @@ fn lhs(p: &mut Parser, r: Restrictions) -> (Option<CompletedMarker>, Option<Bloc
             if p.at_ts(EXPR_FIRST) {
                 expr_bp(p, r, 2);
             }
-            return (Some(m.complete(p, RANGE_EXPR)), None);
+            return Some(m.complete(p, RANGE_EXPR));
         }
         _ => {
-            let (lhs_marker, macro_block_like) = atom::atom_expr(p, r);
-            if macro_block_like == Some(BlockLike::Block) {
-                return (lhs_marker, macro_block_like);
-            }
-
-            if let Some(lhs_marker) = lhs_marker {
-                return (Some(postfix_expr(p, r, lhs_marker)), macro_block_like);
-            } else {
-                return (None, None);
-            }
+            let lhs = atom::atom_expr(p, r)?;
+            return Some(postfix_expr(p, r, lhs));
         }
     };
     expr_bp(p, r, 255);
-    (Some(m.complete(p, kind)), None)
+    Some(m.complete(p, kind))
 }

 fn postfix_expr(p: &mut Parser, r: Restrictions, mut lhs: CompletedMarker) -> CompletedMarker {
-    // Calls are disallowed if the type is a block and we prefer statements because the call cannot be disambiguated from a tuple
-    // E.g. `while true {break}();` is parsed as
-    // `while true {break}; ();`
     let mut allow_calls = !r.prefer_stmt || !is_block(lhs.kind());
     loop {
         lhs = match p.current() {
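
Aside (not part of the diff): the comment removed above is the reason postfix_expr starts with allow_calls set to false for block-like expressions in statement position. A small standalone program, independent of the parser, shows the surface behaviour it describes:

// Rust reads `{ ... } ( ... )` at statement level as two statements, not as
// a call of the block's value, so a statement-position parser must not
// greedily treat the following parentheses as a call.
#[allow(while_true)]
fn main() {
    // Parsed as the statement `while true { break }` followed by the
    // separate statement `();`.
    while true { break } ();
}
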
@@ -418,22 +406,21 @@ fn arg_list(p: &mut Parser) {
 //     let _ = ::a::<b>;
 //     let _ = format!();
 // }
-fn path_expr(p: &mut Parser, r: Restrictions) -> (CompletedMarker, Option<BlockLike>) {
+fn path_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker {
     assert!(paths::is_path_start(p) || p.at(L_ANGLE));
     let m = p.start();
     paths::expr_path(p);
-    let res = match p.current() {
+    match p.current() {
         L_CURLY if !r.forbid_structs => {
             named_field_list(p);
             m.complete(p, STRUCT_LIT)
         }
         EXCL => {
-            let block_like = items::macro_call_after_excl(p); // TODO: Use return type (BlockLike)
-            return (m.complete(p, MACRO_CALL), Some(block_like));
+            items::macro_call_after_excl(p); // TODO: Use return type (BlockLike)
+            m.complete(p, MACRO_CALL)
         }
         _ => m.complete(p, PATH_EXPR),
-    };
-    (res, None)
+    }
 }

 // test struct_lit
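
Aside (not part of the diff): the BlockLike value that the old path_expr pulled out of macro_call_after_excl corresponds to a visible distinction in Rust itself: a braced macro invocation acts like a block in statement position, while a parenthesised one is an ordinary expression. A small standalone example, not taken from this repository:

// A macro that expands to nothing, just to show the two statement forms.
macro_rules! m {
    () => {};
}

fn main() {
    m! {} // brace form: block-like, no trailing semicolon required
    m!(); // paren form: ordinary expression statement, semicolon required
}
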


@@ -61,16 +61,12 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = token_set_union![
 const EXPR_RECOVERY_SET: TokenSet = token_set![LET_KW];

-pub(super) fn atom_expr(
-    p: &mut Parser,
-    r: Restrictions,
-) -> (Option<CompletedMarker>, Option<BlockLike>) {
+pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<CompletedMarker> {
     if let Some(m) = literal(p) {
-        return (Some(m), None);
+        return Some(m);
     }
     if paths::is_path_start(p) || p.at(L_ANGLE) {
-        let path_expr = path_expr(p, r);
-        return (Some(path_expr.0), path_expr.1);
+        return Some(path_expr(p, r));
     }

     let la = p.nth(1);
     let done = match p.current() {
@@ -98,7 +94,7 @@ pub(super) fn atom_expr(
                     // }
                     p.error("expected a loop");
                     m.complete(p, ERROR);
-                    return (None, None);
+                    return None;
                 }
             }
         }
@@ -115,10 +111,10 @@ pub(super) fn atom_expr(
         BREAK_KW => break_expr(p),
         _ => {
             p.err_recover("expected expression", EXPR_RECOVERY_SET);
-            return (None, None);
+            return None;
         }
     };
-    (Some(done), None)
+    Some(done)
 }

 // test tuple_expr
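
Aside (not part of the diff): much of what this revert removes is hand-written plumbing that Option and the ? operator already express, which is how the new lhs consumes atom_expr via `atom::atom_expr(p, r)?`. A tiny illustration with made-up helper names (first_word and parse_lhs are hypothetical, not part of the crate):

// `first_word` stands in for atom_expr: it may produce nothing.
fn first_word(input: &str) -> Option<&str> {
    input.split_whitespace().next()
}

// `parse_lhs` stands in for lhs: the `?` returns None early, replacing the
// explicit `return (None, None)` branches of the tuple-returning version.
fn parse_lhs(input: &str) -> Option<String> {
    let word = first_word(input)?;
    Some(word.to_uppercase())
}

fn main() {
    assert_eq!(parse_lhs("  lhs rest"), Some("LHS".to_string()));
    assert_eq!(parse_lhs("   "), None);
}
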


@@ -43,6 +43,7 @@ fn parser_fuzz_tests() {
 fn self_hosting_parsing() {
     let empty_vec = vec![];
     let dir = project_dir();
+    let mut count = 0u32;
     for entry in walkdir::WalkDir::new(dir)
         .into_iter()
         .filter_entry(|entry| {
@@ -63,6 +64,7 @@ fn self_hosting_parsing() {
             !entry.path().is_dir() && (entry.path().extension() == Some(std::ffi::OsStr::new("rs")))
         })
     {
+        count += 1;
         let text = read_text(entry.path());
         let node = SourceFileNode::parse(&text);
         let errors = node.errors();
@@ -72,6 +74,7 @@ fn self_hosting_parsing() {
             entry
         );
     }
+    panic!("{}", count)
 }
 /// Read file and normalize newlines.
 ///