Remove unnecessary cloning

Lukas Wirth 2024-01-06 17:48:07 +01:00
parent c9c4053eed
commit 5125063a21
7 changed files with 54 additions and 50 deletions

@@ -142,15 +142,15 @@ pub fn intern_const_ref(
LiteralConstRef::Int(i) => {
// FIXME: We should handle failure of layout better.
let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
}
LiteralConstRef::UInt(i) => {
let size = layout.map(|it| it.size.bytes_usize()).unwrap_or(16);
ConstScalar::Bytes(i.to_le_bytes()[0..size].to_vec(), MemoryMap::default())
ConstScalar::Bytes(i.to_le_bytes()[0..size].into(), MemoryMap::default())
}
LiteralConstRef::Bool(b) => ConstScalar::Bytes(vec![*b as u8], MemoryMap::default()),
LiteralConstRef::Bool(b) => ConstScalar::Bytes(Box::new([*b as u8]), MemoryMap::default()),
LiteralConstRef::Char(c) => {
ConstScalar::Bytes((*c as u32).to_le_bytes().to_vec(), MemoryMap::default())
ConstScalar::Bytes((*c as u32).to_le_bytes().into(), MemoryMap::default())
}
LiteralConstRef::Unknown => ConstScalar::Unknown,
};
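
Across the diff, the payload of `ConstScalar::Bytes` changes from `Vec<u8>` to `Box<[u8]>`: interned constant bytes are never resized, so the boxed slice is the tighter fit. A minimal standalone sketch (plain std Rust, not rust-analyzer code) of the difference:

```rust
fn main() {
    // Constant byte blobs are immutable once interned, so the boxed slice can
    // drop Vec's capacity word; `From<&[u8]>` still allocates exactly `len` bytes.
    let raw_bytes = 1234u128.to_le_bytes();
    let raw: &[u8] = &raw_bytes[..8];

    let as_vec: Vec<u8> = raw.to_vec(); // { ptr, len, cap }
    let as_box: Box<[u8]> = raw.into(); // { ptr, len }

    assert_eq!(&*as_vec, &*as_box);
    assert_eq!(std::mem::size_of::<Vec<u8>>(), 3 * std::mem::size_of::<usize>());
    assert_eq!(std::mem::size_of::<Box<[u8]>>(), 2 * std::mem::size_of::<usize>());
}
```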

@@ -219,7 +219,7 @@ impl MemoryMap {
/// A concrete constant value
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConstScalar {
Bytes(Vec<u8>, MemoryMap),
Bytes(Box<[u8]>, MemoryMap),
// FIXME: this is a hack to get around chalk not being able to represent unevaluatable
// constants
UnevaluatedConst(GeneralConstId, Substitution),

@@ -98,16 +98,16 @@ pub enum Operand {
}
impl Operand {
fn from_concrete_const(data: Vec<u8>, memory_map: MemoryMap, ty: Ty) -> Self {
fn from_concrete_const(data: Box<[u8]>, memory_map: MemoryMap, ty: Ty) -> Self {
Operand::Constant(intern_const_scalar(ConstScalar::Bytes(data, memory_map), ty))
}
fn from_bytes(data: Vec<u8>, ty: Ty) -> Self {
fn from_bytes(data: Box<[u8]>, ty: Ty) -> Self {
Operand::from_concrete_const(data, MemoryMap::default(), ty)
}
fn const_zst(ty: Ty) -> Operand {
Self::from_bytes(vec![], ty)
Self::from_bytes(Box::default(), ty)
}
fn from_fn(
@@ -118,7 +118,7 @@ impl Operand {
let ty =
chalk_ir::TyKind::FnDef(CallableDefId::FunctionId(func_id).to_chalk(db), generic_args)
.intern(Interner);
Operand::from_bytes(vec![], ty)
Operand::from_bytes(Box::default(), ty)
}
}
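
Here `Box::default()` stands in for `vec![]` as the zero-length byte blob used for fn-item operands and other zero-sized constants; like an empty `Vec`, an empty boxed slice performs no heap allocation. A tiny sketch of that assumption:

```rust
fn main() {
    // An empty boxed slice: length 0 and, like `vec![]`, no heap allocation
    // is made for a zero-length buffer.
    let empty: Box<[u8]> = Box::default();
    assert!(empty.is_empty());
}
```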

@@ -527,14 +527,15 @@ pub fn interpret_mir(
if evaluator.ptr_size() != std::mem::size_of::<usize>() {
not_supported!("targets with different pointer size from host");
}
let bytes = evaluator.interpret_mir(body.clone(), None.into_iter())?;
let interval = evaluator.interpret_mir(body.clone(), None.into_iter())?;
let bytes = interval.get(&evaluator)?;
let mut memory_map = evaluator.create_memory_map(
&bytes,
bytes,
&ty,
&Locals { ptr: ArenaMap::new(), body, drop_flags: DropFlags::default() },
)?;
memory_map.vtable = evaluator.vtable_map.clone();
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes, memory_map), ty));
return Ok(intern_const_scalar(ConstScalar::Bytes(bytes.into(), memory_map), ty));
})();
(
it,
@@ -803,11 +804,11 @@ impl Evaluator<'_> {
})
}
fn interpret_mir(
&mut self,
fn interpret_mir<'slf>(
&'slf mut self,
body: Arc<MirBody>,
args: impl Iterator<Item = IntervalOrOwned>,
) -> Result<Vec<u8>> {
) -> Result<Interval> {
if let Some(it) = self.stack_depth_limit.checked_sub(1) {
self.stack_depth_limit = it;
} else {
@@ -957,7 +958,7 @@ impl Evaluator<'_> {
None => {
self.code_stack = prev_code_stack;
self.stack_depth_limit += 1;
return Ok(return_interval.get(self)?.to_vec());
return Ok(return_interval);
}
Some(bb) => {
// We don't support const promotion, so we can't truncate the stack yet.
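
This pair of hunks is the core of the commit: `interpret_mir` now returns the result `Interval` itself instead of copying the bytes out with `return_interval.get(self)?.to_vec()`, and callers such as the public `interpret_mir` wrapper above call `interval.get(&evaluator)` only when they actually need the bytes (the interval points into memory owned by the evaluator, so they read it right after the call). A simplified, hypothetical model of the idea; the real `Interval` and `Evaluator` live in rust-analyzer's MIR evaluator and these stand-ins only mirror their shape:

```rust
// Hypothetical stand-ins, not the actual rust-analyzer definitions.
struct Memory(Vec<u8>);

/// An (address, size) window into the evaluator's memory.
#[derive(Clone, Copy)]
struct Interval {
    addr: usize,
    size: usize,
}

impl Interval {
    // Borrow the bytes in place instead of cloning them out.
    fn get<'m>(&self, mem: &'m Memory) -> &'m [u8] {
        &mem.0[self.addr..self.addr + self.size]
    }
}

fn main() {
    let mem = Memory((0u8..16).collect());
    let result = Interval { addr: 4, size: 4 };
    // Old shape: the equivalent of `result.get(&mem).to_vec()` ran inside
    // interpret_mir, so every call paid for a copy.
    // New shape: interpret_mir returns `result`; callers copy only if they must.
    assert_eq!(result.get(&mem), &[4, 5, 6, 7]);
}
```
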
@@ -2173,7 +2174,7 @@ impl Evaluator<'_> {
let arg_bytes = iter::once(Ok(closure_data))
.chain(args.iter().map(|it| Ok(it.get(&self)?.to_owned())))
.collect::<Result<Vec<_>>>()?;
let bytes = self
let interval = self
.interpret_mir(mir_body, arg_bytes.into_iter().map(IntervalOrOwned::Owned))
.map_err(|e| {
MirEvalError::InFunction(
@@ -2181,7 +2182,7 @@ impl Evaluator<'_> {
vec![(Either::Right(closure), span, locals.body.owner)],
)
})?;
destination.write_from_bytes(self, &bytes)?;
destination.write_from_interval(self, interval)?;
Ok(None)
}
@@ -2374,7 +2375,7 @@ impl Evaluator<'_> {
vec![(Either::Left(def), span, locals.body.owner)],
)
})?;
destination.write_from_bytes(self, &result)?;
destination.write_from_interval(self, result)?;
None
})
}
@@ -2680,11 +2681,12 @@ pub fn render_const_using_debug_impl(
) else {
not_supported!("std::fmt::format not found");
};
let message_string = evaluator.interpret_mir(
let interval = evaluator.interpret_mir(
db.mir_body(format_fn.into()).map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
[IntervalOrOwned::Borrowed(Interval { addr: a3, size: evaluator.ptr_size() * 6 })]
.into_iter(),
)?;
let message_string = interval.get(&evaluator)?;
let addr =
Address::from_bytes(&message_string[evaluator.ptr_size()..2 * evaluator.ptr_size()])?;
let size = from_bytes!(usize, message_string[2 * evaluator.ptr_size()..]);
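
After the `std::fmt::format` body has run, the resulting `String`'s fields are decoded from the returned bytes: `Address::from_bytes` takes the data pointer from the second pointer-sized word and `from_bytes!(usize, ...)` takes the length from the third. A rough, self-contained illustration of that decoding using plain std calls; the exact word layout of the `String` value is an assumption made for the example:

```rust
fn main() {
    let ptr_size = std::mem::size_of::<usize>();

    // Pretend these are the bytes of a String value in evaluator memory:
    // one word we ignore, then the data address, then the length.
    let mut message_string = vec![0u8; 3 * ptr_size];
    message_string[ptr_size..2 * ptr_size].copy_from_slice(&0x1000usize.to_le_bytes());
    message_string[2 * ptr_size..].copy_from_slice(&5usize.to_le_bytes());

    let addr = usize::from_le_bytes(
        message_string[ptr_size..2 * ptr_size].try_into().unwrap(),
    );
    let size = usize::from_le_bytes(message_string[2 * ptr_size..].try_into().unwrap());
    assert_eq!((addr, size), (0x1000, 5));
}
```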

@@ -322,12 +322,13 @@ impl Evaluator<'_> {
let hir_def::resolver::ValueNs::FunctionId(format_fn) = format_fn else {
not_supported!("std::fmt::format is not a function")
};
let message_string = self.interpret_mir(
let interval = self.interpret_mir(
self.db
.mir_body(format_fn.into())
.map_err(|e| MirEvalError::MirLowerError(format_fn, e))?,
args.map(|x| IntervalOrOwned::Owned(x.clone())),
)?;
let message_string = interval.get(self)?;
let addr =
Address::from_bytes(&message_string[self.ptr_size()..2 * self.ptr_size()])?;
let size = from_bytes!(usize, message_string[2 * self.ptr_size()..]);

@@ -540,7 +540,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.write_bytes_to_place(
then_target,
place.clone(),
vec![1],
Box::new([1]),
TyBuilder::bool(),
MirSpan::Unknown,
)?;
@@ -548,7 +548,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.write_bytes_to_place(
else_target,
place,
vec![0],
Box::new([0]),
TyBuilder::bool(),
MirSpan::Unknown,
)?;
@@ -602,7 +602,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
generic_args,
)
.intern(Interner);
let func = Operand::from_bytes(vec![], ty);
let func = Operand::from_bytes(Box::default(), ty);
return self.lower_call_and_args(
func,
iter::once(*callee).chain(args.iter().copied()),
@@ -615,7 +615,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let callee_ty = self.expr_ty_after_adjustments(*callee);
match &callee_ty.kind(Interner) {
chalk_ir::TyKind::FnDef(..) => {
let func = Operand::from_bytes(vec![], callee_ty.clone());
let func = Operand::from_bytes(Box::default(), callee_ty.clone());
self.lower_call_and_args(
func,
args.iter().copied(),
@@ -1113,7 +1113,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
Some("start") => lp.take(),
Some("end") => rp.take(),
Some("exhausted") => {
Some(Operand::from_bytes(vec![0], TyBuilder::bool()))
Some(Operand::from_bytes(Box::new([0]), TyBuilder::bool()))
}
_ => None,
};
@@ -1395,17 +1395,18 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
fn lower_literal_to_operand(&mut self, ty: Ty, l: &Literal) -> Result<Operand> {
let size = self
.db
.layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))?
.size
.bytes_usize();
let bytes = match l {
let size = || {
self.db
.layout_of_ty(ty.clone(), self.db.trait_environment_for_body(self.owner))
.map(|it| it.size.bytes_usize())
};
const USIZE_SIZE: usize = mem::size_of::<usize>();
let bytes: Box<[_]> = match l {
hir_def::hir::Literal::String(b) => {
let b = b.as_bytes();
let mut data = Vec::with_capacity(mem::size_of::<usize>() * 2);
data.extend(0usize.to_le_bytes());
data.extend(b.len().to_le_bytes());
let mut data = Box::new([0; { 2 * USIZE_SIZE }]);
data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes());
data[USIZE_SIZE..].copy_from_slice(&b.len().to_le_bytes());
let mut mm = MemoryMap::default();
mm.insert(0, b.to_vec());
return Ok(Operand::from_concrete_const(data, mm, ty));
@@ -1413,28 +1414,28 @@ impl<'ctx> MirLowerCtx<'ctx> {
hir_def::hir::Literal::CString(b) => {
let bytes = b.iter().copied().chain(iter::once(0)).collect::<Vec<_>>();
let mut data = Vec::with_capacity(mem::size_of::<usize>() * 2);
data.extend(0usize.to_le_bytes());
data.extend(bytes.len().to_le_bytes());
let mut data = Box::new([0; { 2 * USIZE_SIZE }]);
data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes());
data[USIZE_SIZE..].copy_from_slice(&bytes.len().to_le_bytes());
let mut mm = MemoryMap::default();
mm.insert(0, bytes);
return Ok(Operand::from_concrete_const(data, mm, ty));
}
hir_def::hir::Literal::ByteString(b) => {
let mut data = Vec::with_capacity(mem::size_of::<usize>() * 2);
data.extend(0usize.to_le_bytes());
data.extend(b.len().to_le_bytes());
let mut data = Box::new([0; { 2 * USIZE_SIZE }]);
data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes());
data[USIZE_SIZE..].copy_from_slice(&b.len().to_le_bytes());
let mut mm = MemoryMap::default();
mm.insert(0, b.to_vec());
return Ok(Operand::from_concrete_const(data, mm, ty));
}
hir_def::hir::Literal::Char(c) => u32::from(*c).to_le_bytes().into(),
hir_def::hir::Literal::Bool(b) => vec![*b as u8],
hir_def::hir::Literal::Int(it, _) => it.to_le_bytes()[0..size].into(),
hir_def::hir::Literal::Uint(it, _) => it.to_le_bytes()[0..size].into(),
hir_def::hir::Literal::Float(f, _) => match size {
8 => f.into_f64().to_le_bytes().into(),
4 => f.into_f32().to_le_bytes().into(),
hir_def::hir::Literal::Char(c) => Box::new(u32::from(*c).to_le_bytes()),
hir_def::hir::Literal::Bool(b) => Box::new([*b as u8]),
hir_def::hir::Literal::Int(it, _) => Box::from(&it.to_le_bytes()[0..size()?]),
hir_def::hir::Literal::Uint(it, _) => Box::from(&it.to_le_bytes()[0..size()?]),
hir_def::hir::Literal::Float(f, _) => match size()? {
8 => Box::new(f.into_f64().to_le_bytes()),
4 => Box::new(f.into_f32().to_le_bytes()),
_ => {
return Err(MirLowerError::TypeError("float with size other than 4 or 8 bytes"))
}
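
Two things change in `lower_literal_to_operand`: the layout query behind `size` becomes a closure, so it runs only for the numeric arms that need it (with its error propagated via `size()?`), and the string-like arms fill a fixed-size boxed buffer of two pointer-sized words instead of growing a `Vec`. A standalone sketch of just that buffer pattern, with `fat_pointer_bytes` as a made-up helper name:

```rust
const USIZE_SIZE: usize = std::mem::size_of::<usize>();

// Lay out (address 0 into the literal's memory map, byte length) as
// little-endian words in an exactly-sized boxed buffer, mirroring the diff's
// String/CString/ByteString arms.
fn fat_pointer_bytes(len: usize) -> Box<[u8]> {
    let mut data = Box::new([0u8; 2 * USIZE_SIZE]);
    data[..USIZE_SIZE].copy_from_slice(&0usize.to_le_bytes());
    data[USIZE_SIZE..].copy_from_slice(&len.to_le_bytes());
    data // Box<[u8; 2 * USIZE_SIZE]> coerces to Box<[u8]>
}

fn main() {
    let literal = b"hello";
    let data = fat_pointer_bytes(literal.len());
    assert_eq!(data.len(), 2 * USIZE_SIZE);
    assert_eq!(&data[USIZE_SIZE..], &literal.len().to_le_bytes());
}
```
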
@@ -1483,7 +1484,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
&mut self,
prev_block: BasicBlockId,
place: Place,
cv: Vec<u8>,
cv: Box<[u8]>,
ty: Ty,
span: MirSpan,
) -> Result<()> {

@@ -244,7 +244,7 @@ impl MirLowerCtx<'_> {
);
} else {
let c = Operand::from_concrete_const(
pattern_len.to_le_bytes().to_vec(),
pattern_len.to_le_bytes().into(),
MemoryMap::default(),
TyBuilder::usize(),
);