Rollup merge of #94143 - est31:let_else_const_eval, r=lcnr

rustc_const_eval: adopt let else in more places

Continuation of #89933, #91018, #91481, #93046, #93590, #94011.

I have extended my clippy lint to also recognize tuple passing and match statements. The diff caused by fixing it is well over a thousand lines, so I have split it up into multiple pull requests to make reviewing easier. This PR handles rustc_const_eval.
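For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of the rewrite this series performs (illustrative only, not code from this PR; `lookup` and `describe` are made-up helpers). It shows how a `match` whose fallback arm only diverges becomes a `let ... else`, including the tuple-destructuring case the extended lint now recognizes:

// Illustrative sketch of the match -> let-else rewrite; `lookup` and
// `describe` are hypothetical helpers, not part of rustc_const_eval.
fn lookup(key: u32) -> Option<(u32, &'static str)> {
    (key == 1).then(|| (1, "one"))
}

fn describe(key: u32) -> Option<String> {
    // Before: the happy path is buried in a match with a diverging `None` arm.
    // let (id, name) = match lookup(key) {
    //     Some(entry) => entry,
    //     None => return None,
    // };

    // After: `let else` destructures the tuple directly; the else block must diverge.
    let Some((id, name)) = lookup(key) else {
        return None;
    };
    Some(format!("{id}: {name}"))
}

fn main() {
    assert_eq!(describe(1).as_deref(), Some("1: one"));
    assert_eq!(describe(2), None);
}

At the time of this PR the syntax was still gated behind the unstable `let_else` feature inside rustc; it has since been stabilized (Rust 1.65).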
Matthias Krüger authored on 2022-02-21 19:36:48 +01:00; committed by GitHub
13 changed files with 86 additions and 123 deletions

@@ -631,15 +631,11 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 // the last field). Can't have foreign types here, how would we
 // adjust alignment and size for them?
 let field = layout.field(self, layout.fields.count() - 1);
-let (unsized_size, unsized_align) =
-    match self.size_and_align_of(metadata, &field)? {
-        Some(size_and_align) => size_and_align,
-        None => {
-            // A field with an extern type. We don't know the actual dynamic size
-            // or the alignment.
-            return Ok(None);
-        }
-    };
+let Some((unsized_size, unsized_align)) = self.size_and_align_of(metadata, &field)? else {
+    // A field with an extern type. We don't know the actual dynamic size
+    // or the alignment.
+    return Ok(None);
+};
 // FIXME (#26403, #27023): We should be adding padding
 // to `sized_size` (to accommodate the `unsized_align`

@@ -84,22 +84,19 @@ fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval:
 trace!("intern_shallow {:?} with {:?}", alloc_id, mode);
 // remove allocation
 let tcx = ecx.tcx;
-let (kind, mut alloc) = match ecx.memory.alloc_map.remove(&alloc_id) {
-    Some(entry) => entry,
-    None => {
-        // Pointer not found in local memory map. It is either a pointer to the global
-        // map, or dangling.
-        // If the pointer is dangling (neither in local nor global memory), we leave it
-        // to validation to error -- it has the much better error messages, pointing out where
-        // in the value the dangling reference lies.
-        // The `delay_span_bug` ensures that we don't forget such a check in validation.
-        if tcx.get_global_alloc(alloc_id).is_none() {
-            tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
-        }
-        // treat dangling pointers like other statics
-        // just to stop trying to recurse into them
-        return Some(IsStaticOrFn);
-    }
+let Some((kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
+    // Pointer not found in local memory map. It is either a pointer to the global
+    // map, or dangling.
+    // If the pointer is dangling (neither in local nor global memory), we leave it
+    // to validation to error -- it has the much better error messages, pointing out where
+    // in the value the dangling reference lies.
+    // The `delay_span_bug` ensures that we don't forget such a check in validation.
+    if tcx.get_global_alloc(alloc_id).is_none() {
+        tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
+    }
+    // treat dangling pointers like other statics
+    // just to stop trying to recurse into them
+    return Some(IsStaticOrFn);
 };
 // This match is just a canary for future changes to `MemoryKind`, which most likely need
 // changes in this function.

@@ -291,21 +291,18 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
     );
 }
-let (alloc_kind, mut alloc) = match self.alloc_map.remove(&alloc_id) {
-    Some(alloc) => alloc,
-    None => {
-        // Deallocating global memory -- always an error
-        return Err(match self.tcx.get_global_alloc(alloc_id) {
-            Some(GlobalAlloc::Function(..)) => {
-                err_ub_format!("deallocating {}, which is a function", alloc_id)
-            }
-            Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
-                err_ub_format!("deallocating {}, which is static memory", alloc_id)
-            }
-            None => err_ub!(PointerUseAfterFree(alloc_id)),
-        }
-        .into());
-    }
+let Some((alloc_kind, mut alloc)) = self.alloc_map.remove(&alloc_id) else {
+    // Deallocating global memory -- always an error
+    return Err(match self.tcx.get_global_alloc(alloc_id) {
+        Some(GlobalAlloc::Function(..)) => {
+            err_ub_format!("deallocating {}, which is a function", alloc_id)
+        }
+        Some(GlobalAlloc::Static(..) | GlobalAlloc::Memory(..)) => {
+            err_ub_format!("deallocating {}, which is static memory", alloc_id)
+        }
+        None => err_ub!(PointerUseAfterFree(alloc_id)),
+    }
+    .into());
 };
 if alloc.mutability == Mutability::Not {
@@ -957,9 +954,9 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
     ptr: Pointer<Option<M::PointerTag>>,
     size: Size,
 ) -> InterpResult<'tcx, &[u8]> {
-    let alloc_ref = match self.get(ptr, size, Align::ONE)? {
-        Some(a) => a,
-        None => return Ok(&[]), // zero-sized access
+    let Some(alloc_ref) = self.get(ptr, size, Align::ONE)? else {
+        // zero-sized access
+        return Ok(&[]);
     };
     // Side-step AllocRef and directly access the underlying bytes more efficiently.
     // (We are staying inside the bounds here so all is good.)
@@ -983,17 +980,14 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
     assert_eq!(lower, len, "can only write iterators with a precise length");
     let size = Size::from_bytes(len);
-    let alloc_ref = match self.get_mut(ptr, size, Align::ONE)? {
-        Some(alloc_ref) => alloc_ref,
-        None => {
-            // zero-sized access
-            assert_matches!(
-                src.next(),
-                None,
-                "iterator said it was empty but returned an element"
-            );
-            return Ok(());
-        }
+    let Some(alloc_ref) = self.get_mut(ptr, size, Align::ONE)? else {
+        // zero-sized access
+        assert_matches!(
+            src.next(),
+            None,
+            "iterator said it was empty but returned an element"
+        );
+        return Ok(());
     };
     // Side-step AllocRef and directly access the underlying bytes more efficiently.
@@ -1043,18 +1037,18 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
 // and once below to get the underlying `&[mut] Allocation`.
 // Source alloc preparations and access hooks.
-let (src_alloc_id, src_offset, src) = match src_parts {
-    None => return Ok(()), // Zero-sized *source*, that means dst is also zero-sized and we have nothing to do.
-    Some(src_ptr) => src_ptr,
+let Some((src_alloc_id, src_offset, src)) = src_parts else {
+    // Zero-sized *source*, that means dst is also zero-sized and we have nothing to do.
+    return Ok(());
 };
 let src_alloc = self.get_raw(src_alloc_id)?;
 let src_range = alloc_range(src_offset, size);
 M::memory_read(&self.extra, &src_alloc.extra, src.provenance, src_range)?;
 // We need the `dest` ptr for the next operation, so we get it now.
 // We already did the source checks and called the hooks so we are good to return early.
-let (dest_alloc_id, dest_offset, dest) = match dest_parts {
-    None => return Ok(()), // Zero-sized *destiantion*.
-    Some(dest_ptr) => dest_ptr,
+let Some((dest_alloc_id, dest_offset, dest)) = dest_parts else {
+    // Zero-sized *destination*.
+    return Ok(());
 };
 // This checks relocation edges on the src, which needs to happen before

@@ -258,15 +258,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     return Ok(None);
 }
-let alloc = match self.get_alloc(mplace)? {
-    Some(ptr) => ptr,
-    None => {
-        return Ok(Some(ImmTy {
-            // zero-sized type
-            imm: Scalar::ZST.into(),
-            layout: mplace.layout,
-        }));
-    }
+let Some(alloc) = self.get_alloc(mplace)? else {
+    return Ok(Some(ImmTy {
+        // zero-sized type
+        imm: Scalar::ZST.into(),
+        layout: mplace.layout,
+    }));
 };
 match mplace.layout.abi {

@@ -420,9 +420,8 @@ where
 ) -> InterpResult<'tcx, impl Iterator<Item = InterpResult<'tcx, MPlaceTy<'tcx, Tag>>> + 'a>
 {
     let len = base.len(self)?; // also asserts that we have a type where this makes sense
-    let stride = match base.layout.fields {
-        FieldsShape::Array { stride, .. } => stride,
-        _ => span_bug!(self.cur_span(), "mplace_array_fields: expected an array layout"),
+    let FieldsShape::Array { stride, .. } = base.layout.fields else {
+        span_bug!(self.cur_span(), "mplace_array_fields: expected an array layout");
     };
     let layout = base.layout.field(self, 0);
     let dl = &self.tcx.data_layout;
@@ -747,9 +746,9 @@ where
     // Invalid places are a thing: the return place of a diverging function
     let tcx = *self.tcx;
-    let mut alloc = match self.get_alloc_mut(dest)? {
-        Some(a) => a,
-        None => return Ok(()), // zero-sized access
+    let Some(mut alloc) = self.get_alloc_mut(dest)? else {
+        // zero-sized access
+        return Ok(());
     };
     // FIXME: We should check that there are dest.layout.size many bytes available in

@@ -46,15 +46,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     return Ok(false);
 }
-let loc = match self.frame().loc {
-    Ok(loc) => loc,
-    Err(_) => {
-        // We are unwinding and this fn has no cleanup code.
-        // Just go on unwinding.
-        trace!("unwinding: skipping frame");
-        self.pop_stack_frame(/* unwinding */ true)?;
-        return Ok(true);
-    }
+let Ok(loc) = self.frame().loc else {
+    // We are unwinding and this fn has no cleanup code.
+    // Just go on unwinding.
+    trace!("unwinding: skipping frame");
+    self.pop_stack_frame(/* unwinding */ true)?;
+    return Ok(true);
 };
 let basic_block = &self.body().basic_blocks()[loc.block];

@@ -321,10 +321,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 | ty::InstanceDef::CloneShim(..)
 | ty::InstanceDef::Item(_) => {
     // We need MIR for this fn
-    let (body, instance) =
-        match M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? {
-            Some(body) => body,
-            None => return Ok(()),
+    let Some((body, instance)) =
+        M::find_mir_or_eval_fn(self, instance, caller_abi, args, ret, unwind)? else {
+            return Ok(());
         };
     // Compute callee information using the `instance` returned by

@@ -851,12 +851,9 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
 // to reject those pointers, we just do not have the machinery to
 // talk about parts of a pointer.
 // We also accept uninit, for consistency with the slow path.
-let alloc = match self.ecx.memory.get(mplace.ptr, size, mplace.align)? {
-    Some(a) => a,
-    None => {
-        // Size 0, nothing more to check.
-        return Ok(());
-    }
+let Some(alloc) = self.ecx.memory.get(mplace.ptr, size, mplace.align)? else {
+    // Size 0, nothing more to check.
+    return Ok(());
 };
 let allow_uninit_and_ptr = !M::enforce_number_validity(self.ecx);