Implement some intrinsics

This commit is contained in:
hkalbasi 2023-03-20 21:48:01 +03:30
parent 8e73ea5253
commit 3303a6eff5
7 changed files with 521 additions and 145 deletions

View File

@ -11,6 +11,8 @@ use super::{
ConstEvalError, ConstEvalError,
}; };
mod intrinsics;
fn simplify(e: ConstEvalError) -> ConstEvalError { fn simplify(e: ConstEvalError) -> ConstEvalError {
match e { match e {
ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e)) => { ConstEvalError::MirEvalError(MirEvalError::InFunction(_, e)) => {
@ -82,6 +84,49 @@ fn bit_op() {
check_number(r#"const GOAL: i8 = 1 << 8"#, 0); check_number(r#"const GOAL: i8 = 1 << 8"#, 0);
} }
// Covers the cast kinds newly handled by the MIR const evaluator:
// int-to-pointer round trips, fat-to-thin pointer casts, ref-to-raw
// coercion, and fat-to-fat pointer casts that keep slice metadata.
#[test]
fn casts() {
// Integer exposed as a raw pointer and cast back: value must survive unchanged.
check_number(r#"const GOAL: usize = 12 as *const i32 as usize"#, 12);
// Fat pointer (`*const [i32]`) cast to a thin pointer drops the length
// metadata but keeps the data address, so `*z` reads the first element.
check_number(
r#"
//- minicore: coerce_unsized, index, slice
const GOAL: i32 = {
let a = [10, 20, 3, 15];
let x: &[i32] = &a;
let y: *const [i32] = x;
let z = y as *const i32;
unsafe { *z }
};
"#,
10,
);
// `&mut T as *mut _`: the pointee type must be inferred from the source
// reference, and the dereference must read the original value.
check_number(
r#"
//- minicore: coerce_unsized, index, slice
const GOAL: i16 = {
let a = &mut 5;
let z = a as *mut _;
unsafe { *z }
};
"#,
5,
);
// Fat-to-fat pointer cast: the length metadata is carried over verbatim
// (still 4 elements) even though the element type changes.
check_number(
r#"
//- minicore: coerce_unsized, index, slice
const GOAL: usize = {
let a = [10, 20, 3, 15];
let x: &[i32] = &a;
let y: *const [i32] = x;
let z = y as *const [u8]; // slice fat pointer cast don't touch metadata
let w = unsafe { &*z };
w.len()
};
"#,
4,
);
}
#[test] #[test]
fn locals() { fn locals() {
check_number( check_number(
@ -279,20 +324,6 @@ fn function_call() {
); );
} }
#[test]
fn intrinsics() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn size_of<T>() -> usize;
}
const GOAL: usize = size_of::<i32>();
"#,
4,
);
}
#[test] #[test]
fn trait_basic() { fn trait_basic() {
check_number( check_number(
@ -1353,6 +1384,17 @@ fn array_and_index() {
check_number( check_number(
r#" r#"
//- minicore: coerce_unsized, index, slice //- minicore: coerce_unsized, index, slice
const GOAL: usize = {
let a = [1, 2, 3];
let x: &[i32] = &a;
let y = &*x;
y.len()
};"#,
3,
);
check_number(
r#"
//- minicore: coerce_unsized, index, slice
const GOAL: usize = [1, 2, 3, 4, 5].len();"#, const GOAL: usize = [1, 2, 3, 4, 5].len();"#,
5, 5,
); );

View File

@ -0,0 +1,162 @@
use super::*;
// `size_of` is handled by the evaluator's intrinsic shim rather than by a
// MIR body; an `i32` is 4 bytes on every supported target data layout.
#[test]
fn size_of() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn size_of<T>() -> usize;
}
const GOAL: usize = size_of::<i32>();
"#,
4,
);
}
// `transmute` must copy the source bytes into the destination unchanged:
// two little-endian `i16` values of 1 reinterpret as 0x00010001 as an `i32`.
#[test]
fn transmute() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn transmute<T, U>(e: T) -> U;
}
const GOAL: i32 = transmute((1i16, 1i16));
"#,
0x00010001,
);
}
// `const_eval_select` in a const context must unpack the argument tuple and
// invoke the compile-time callable (`called_in_const`), ignoring the runtime
// one; both callees here agree on `x + y`, so the expected result is 5.
#[test]
fn const_eval_select() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn const_eval_select<ARG, F, G, RET>(arg: ARG, called_in_const: F, called_at_rt: G) -> RET
where
G: FnOnce<ARG, Output = RET>,
F: FnOnce<ARG, Output = RET>;
}
const fn in_const(x: i32, y: i32) -> i32 {
x + y
}
fn in_rt(x: i32, y: i32) -> i32 {
x + y
}
const GOAL: i32 = const_eval_select((2, 3), in_const, in_rt);
"#,
5,
);
}
// `wrapping_add` must wrap on overflow instead of panicking:
// 10 + 250 = 260, which wraps to 4 in `u8` (260 mod 256).
#[test]
fn wrapping_add() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn wrapping_add<T>(a: T, b: T) -> T;
}
const GOAL: u8 = wrapping_add(10, 250);
"#,
4,
);
}
// `offset` advances a typed pointer by `count * size_of::<T>()` bytes.
// Moving 2 elements of `(u8, u8, u8)` from the slice start lands on
// `(30, 31, 32)`, whose `.1` field is 31.
#[test]
fn offset() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
extern "rust-intrinsic" {
pub fn offset<T>(dst: *const T, offset: isize) -> *const T;
}
const GOAL: u8 = unsafe {
let ar: &[(u8, u8, u8)] = &[
(10, 11, 12),
(20, 21, 22),
(30, 31, 32),
(40, 41, 42),
(50, 51, 52),
];
let ar: *const [(u8, u8, u8)] = ar;
let ar = ar as *const (u8, u8, u8);
let element = offset(ar, 2);
element.1
};
"#,
31,
);
}
// `arith_offset` is the wrapping variant of `offset`; a negative step must
// be honored, so +102 then -100 nets to +2 elements — same target as the
// `offset` test above: field `.1` of `(30, 31, 32)`, i.e. 31.
#[test]
fn arith_offset() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
extern "rust-intrinsic" {
pub fn arith_offset<T>(dst: *const T, offset: isize) -> *const T;
}
const GOAL: u8 = unsafe {
let ar: &[(u8, u8, u8)] = &[
(10, 11, 12),
(20, 21, 22),
(30, 31, 32),
(40, 41, 42),
(50, 51, 52),
];
let ar: *const [(u8, u8, u8)] = ar;
let ar = ar as *const (u8, u8, u8);
let element = arith_offset(arith_offset(ar, 102), -100);
element.1
};
"#,
31,
);
}
// `copy_nonoverlapping` must write `count` elements from `src` over `dst`:
// copying one element from `y` (5) into `x` (initially 2) leaves `x == 5`.
#[test]
fn copy_nonoverlapping() {
check_number(
r#"
extern "rust-intrinsic" {
pub fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize);
}
const GOAL: u8 = unsafe {
let mut x = 2;
let y = 5;
copy_nonoverlapping(&y, &mut x, 1);
x
};
"#,
5,
);
}
// `copy` must handle overlapping ranges: `z` points 4 bytes (one i32) past
// `y`, so copying 4 elements shifts the array left by one, producing
// [2, 3, 4, 5, 5] whose sum is 19.
#[test]
fn copy() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
extern "rust-intrinsic" {
pub fn copy<T>(src: *const T, dst: *mut T, count: usize);
}
const GOAL: i32 = unsafe {
let mut x = [1i32, 2, 3, 4, 5];
let y = (&mut x as *mut _) as *mut i32;
let z = (y as usize + 4) as *const i32;
copy(z, y, 4);
x[0] + x[1] + x[2] + x[3] + x[4]
};
"#,
19,
);
}

View File

@ -630,8 +630,15 @@ impl<'a> InferenceContext<'a> {
Expr::Cast { expr, type_ref } => { Expr::Cast { expr, type_ref } => {
let cast_ty = self.make_ty(type_ref); let cast_ty = self.make_ty(type_ref);
// FIXME: propagate the "castable to" expectation // FIXME: propagate the "castable to" expectation
let _inner_ty = self.infer_expr_no_expect(*expr); let inner_ty = self.infer_expr_no_expect(*expr);
// FIXME check the cast... match (inner_ty.kind(Interner), cast_ty.kind(Interner)) {
(TyKind::Ref(_, _, inner), TyKind::Raw(_, cast)) => {
// FIXME: record invalid cast diagnostic in case of mismatch
self.unify(inner, cast);
}
// FIXME check the other kinds of cast...
_ => (),
}
cast_ty cast_ty
} }
Expr::Ref { expr, rawness, mutability } => { Expr::Ref { expr, rawness, mutability } => {

View File

@ -96,11 +96,18 @@ enum Address {
use Address::*; use Address::*;
#[derive(Debug, Clone, Copy)]
struct Interval { struct Interval {
addr: Address, addr: Address,
size: usize, size: usize,
} }
#[derive(Debug, Clone)]
struct IntervalAndTy {
interval: Interval,
ty: Ty,
}
impl Interval { impl Interval {
fn new(addr: Address, size: usize) -> Self { fn new(addr: Address, size: usize) -> Self {
Self { addr, size } Self { addr, size }
@ -110,11 +117,37 @@ impl Interval {
memory.read_memory(self.addr, self.size) memory.read_memory(self.addr, self.size)
} }
fn write_from_bytes(&self, memory: &mut Evaluator<'_>, bytes: &[u8]) -> Result<()> {
memory.write_memory(self.addr, bytes)
}
fn write_from_interval(&self, memory: &mut Evaluator<'_>, interval: Interval) -> Result<()> {
// FIXME: this could be more efficent
let bytes = &interval.get(memory)?.to_vec();
memory.write_memory(self.addr, bytes)
}
fn slice(self, range: Range<usize>) -> Interval { fn slice(self, range: Range<usize>) -> Interval {
Interval { addr: self.addr.offset(range.start), size: range.len() } Interval { addr: self.addr.offset(range.start), size: range.len() }
} }
} }
impl IntervalAndTy {
fn get<'a>(&self, memory: &'a Evaluator<'a>) -> Result<&'a [u8]> {
memory.read_memory(self.interval.addr, self.interval.size)
}
fn new(
addr: Address,
ty: Ty,
evaluator: &Evaluator<'_>,
locals: &Locals<'_>,
) -> Result<IntervalAndTy> {
let size = evaluator.size_of_sized(&ty, locals, "type of interval")?;
Ok(IntervalAndTy { interval: Interval { addr, size }, ty })
}
}
enum IntervalOrOwned { enum IntervalOrOwned {
Owned(Vec<u8>), Owned(Vec<u8>),
Borrowed(Interval), Borrowed(Interval),
@ -135,7 +168,7 @@ impl Address {
fn from_usize(x: usize) -> Self { fn from_usize(x: usize) -> Self {
if x > usize::MAX / 2 { if x > usize::MAX / 2 {
Stack(usize::MAX - x) Stack(x - usize::MAX / 2)
} else { } else {
Heap(x) Heap(x)
} }
@ -147,7 +180,7 @@ impl Address {
fn to_usize(&self) -> usize { fn to_usize(&self) -> usize {
let as_num = match self { let as_num = match self {
Stack(x) => usize::MAX - *x, Stack(x) => *x + usize::MAX / 2,
Heap(x) => *x, Heap(x) => *x,
}; };
as_num as_num
@ -174,7 +207,7 @@ pub enum MirEvalError {
/// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected /// Means that code had undefined behavior. We don't try to actively detect UB, but if it was detected
/// then use this type of error. /// then use this type of error.
UndefinedBehavior(&'static str), UndefinedBehavior(&'static str),
Panic, Panic(String),
MirLowerError(FunctionId, MirLowerError), MirLowerError(FunctionId, MirLowerError),
TypeIsUnsized(Ty, &'static str), TypeIsUnsized(Ty, &'static str),
NotSupported(String), NotSupported(String),
@ -197,7 +230,7 @@ impl std::fmt::Debug for MirEvalError {
Self::UndefinedBehavior(arg0) => { Self::UndefinedBehavior(arg0) => {
f.debug_tuple("UndefinedBehavior").field(arg0).finish() f.debug_tuple("UndefinedBehavior").field(arg0).finish()
} }
Self::Panic => write!(f, "Panic"), Self::Panic(msg) => write!(f, "Panic with message:\n{msg:?}"),
Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"), Self::TargetDataLayoutNotAvailable => write!(f, "TargetDataLayoutNotAvailable"),
Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."), Self::TypeIsUnsized(ty, it) => write!(f, "{ty:?} is unsized. {it} should be sized."),
Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"), Self::ExecutionLimitExceeded => write!(f, "execution limit exceeded"),
@ -289,7 +322,19 @@ impl Evaluator<'_> {
} }
fn place_addr(&self, p: &Place, locals: &Locals<'_>) -> Result<Address> { fn place_addr(&self, p: &Place, locals: &Locals<'_>) -> Result<Address> {
Ok(self.place_addr_and_ty(p, locals)?.0) Ok(self.place_addr_and_ty_and_metadata(p, locals)?.0)
}
fn place_interval(&self, p: &Place, locals: &Locals<'_>) -> Result<Interval> {
let place_addr_and_ty = self.place_addr_and_ty_and_metadata(p, locals)?;
Ok(Interval {
addr: place_addr_and_ty.0,
size: self.size_of_sized(
&place_addr_and_ty.1,
locals,
"Type of place that we need its interval",
)?,
})
} }
fn ptr_size(&self) -> usize { fn ptr_size(&self) -> usize {
@ -299,10 +344,15 @@ impl Evaluator<'_> {
} }
} }
fn place_addr_and_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<(Address, Ty)> { fn place_addr_and_ty_and_metadata<'a>(
&'a self,
p: &Place,
locals: &'a Locals<'a>,
) -> Result<(Address, Ty, Option<Interval>)> {
let mut addr = locals.ptr[p.local]; let mut addr = locals.ptr[p.local];
let mut ty: Ty = let mut ty: Ty =
self.ty_filler(&locals.body.locals[p.local].ty, locals.subst, locals.body.owner)?; self.ty_filler(&locals.body.locals[p.local].ty, locals.subst, locals.body.owner)?;
let mut metadata = None; // locals are always sized
for proj in &p.projection { for proj in &p.projection {
match proj { match proj {
ProjectionElem::Deref => { ProjectionElem::Deref => {
@ -314,12 +364,18 @@ impl Evaluator<'_> {
)) ))
} }
}; };
metadata = if self.size_of(&ty, locals)?.is_none() {
Some(Interval { addr: addr.offset(self.ptr_size()), size: self.ptr_size() })
} else {
None
};
let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?); let x = from_bytes!(usize, self.read_memory(addr, self.ptr_size())?);
addr = Address::from_usize(x); addr = Address::from_usize(x);
} }
ProjectionElem::Index(op) => { ProjectionElem::Index(op) => {
let offset = let offset =
from_bytes!(usize, self.read_memory(locals.ptr[*op], self.ptr_size())?); from_bytes!(usize, self.read_memory(locals.ptr[*op], self.ptr_size())?);
metadata = None; // Result of index is always sized
match &ty.data(Interner).kind { match &ty.data(Interner).kind {
TyKind::Ref(_, _, inner) => match &inner.data(Interner).kind { TyKind::Ref(_, _, inner) => match &inner.data(Interner).kind {
TyKind::Slice(inner) => { TyKind::Slice(inner) => {
@ -357,6 +413,7 @@ impl Evaluator<'_> {
.clone(); .clone();
let offset = layout.fields.offset(f).bytes_usize(); let offset = layout.fields.offset(f).bytes_usize();
addr = addr.offset(offset); addr = addr.offset(offset);
metadata = None; // tuple field is always sized
} }
_ => return Err(MirEvalError::TypeError("Only tuple has tuple fields")), _ => return Err(MirEvalError::TypeError("Only tuple has tuple fields")),
}, },
@ -386,6 +443,8 @@ impl Evaluator<'_> {
.offset(u32::from(f.local_id.into_raw()) as usize) .offset(u32::from(f.local_id.into_raw()) as usize)
.bytes_usize(); .bytes_usize();
addr = addr.offset(offset); addr = addr.offset(offset);
// FIXME: support structs with unsized fields
metadata = None;
} }
_ => return Err(MirEvalError::TypeError("Only adt has fields")), _ => return Err(MirEvalError::TypeError("Only adt has fields")),
}, },
@ -396,7 +455,7 @@ impl Evaluator<'_> {
ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"), ProjectionElem::OpaqueCast(_) => not_supported!("opaque cast"),
} }
} }
Ok((addr, ty)) Ok((addr, ty, metadata))
} }
fn layout(&self, ty: &Ty) -> Result<Layout> { fn layout(&self, ty: &Ty) -> Result<Layout> {
@ -411,16 +470,23 @@ impl Evaluator<'_> {
} }
fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<Ty> { fn place_ty<'a>(&'a self, p: &Place, locals: &'a Locals<'a>) -> Result<Ty> {
Ok(self.place_addr_and_ty(p, locals)?.1) Ok(self.place_addr_and_ty_and_metadata(p, locals)?.1)
} }
fn operand_ty<'a>(&'a self, o: &'a Operand, locals: &'a Locals<'a>) -> Result<Ty> { fn operand_ty(&self, o: &Operand, locals: &Locals<'_>) -> Result<Ty> {
Ok(match o { Ok(match o {
Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?, Operand::Copy(p) | Operand::Move(p) => self.place_ty(p, locals)?,
Operand::Constant(c) => c.data(Interner).ty.clone(), Operand::Constant(c) => c.data(Interner).ty.clone(),
}) })
} }
fn operand_ty_and_eval(&mut self, o: &Operand, locals: &Locals<'_>) -> Result<IntervalAndTy> {
Ok(IntervalAndTy {
interval: self.eval_operand(o, locals)?,
ty: self.operand_ty(o, locals)?,
})
}
fn interpret_mir( fn interpret_mir(
&mut self, &mut self,
body: &MirBody, body: &MirBody,
@ -498,14 +564,19 @@ impl Evaluator<'_> {
cleanup: _, cleanup: _,
from_hir_call: _, from_hir_call: _,
} => { } => {
let destination = self.place_interval(destination, &locals)?;
let fn_ty = self.operand_ty(func, &locals)?; let fn_ty = self.operand_ty(func, &locals)?;
let args = args
.iter()
.map(|x| self.operand_ty_and_eval(x, &locals))
.collect::<Result<Vec<_>>>()?;
match &fn_ty.data(Interner).kind { match &fn_ty.data(Interner).kind {
TyKind::Function(_) => { TyKind::Function(_) => {
let bytes = self.eval_operand(func, &locals)?; let bytes = self.eval_operand(func, &locals)?;
self.exec_fn_pointer(bytes, destination, args, &locals)?; self.exec_fn_pointer(bytes, destination, &args, &locals)?;
} }
TyKind::FnDef(def, generic_args) => { TyKind::FnDef(def, generic_args) => {
self.exec_fn_def(*def, generic_args, destination, args, &locals)?; self.exec_fn_def(*def, generic_args, destination, &args, &locals)?;
} }
x => not_supported!("unknown function type {x:?}"), x => not_supported!("unknown function type {x:?}"),
} }
@ -545,8 +616,12 @@ impl Evaluator<'_> {
Ok(match r { Ok(match r {
Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?), Rvalue::Use(x) => Borrowed(self.eval_operand(x, locals)?),
Rvalue::Ref(_, p) => { Rvalue::Ref(_, p) => {
let addr = self.place_addr(p, locals)?; let (addr, _, metadata) = self.place_addr_and_ty_and_metadata(p, locals)?;
Owned(addr.to_bytes()) let mut r = addr.to_bytes();
if let Some(metadata) = metadata {
r.extend(metadata.get(self)?);
}
Owned(r)
} }
Rvalue::Len(_) => not_supported!("rvalue len"), Rvalue::Len(_) => not_supported!("rvalue len"),
Rvalue::UnaryOp(op, val) => { Rvalue::UnaryOp(op, val) => {
@ -624,8 +699,12 @@ impl Evaluator<'_> {
let r = match op { let r = match op {
BinOp::Add => l128.overflowing_add(r128).0, BinOp::Add => l128.overflowing_add(r128).0,
BinOp::Mul => l128.overflowing_mul(r128).0, BinOp::Mul => l128.overflowing_mul(r128).0,
BinOp::Div => l128.checked_div(r128).ok_or(MirEvalError::Panic)?, BinOp::Div => l128.checked_div(r128).ok_or_else(|| {
BinOp::Rem => l128.checked_rem(r128).ok_or(MirEvalError::Panic)?, MirEvalError::Panic(format!("Overflow in {op:?}"))
})?,
BinOp::Rem => l128.checked_rem(r128).ok_or_else(|| {
MirEvalError::Panic(format!("Overflow in {op:?}"))
})?,
BinOp::Sub => l128.overflowing_sub(r128).0, BinOp::Sub => l128.overflowing_sub(r128).0,
BinOp::BitAnd => l128 & r128, BinOp::BitAnd => l128 & r128,
BinOp::BitOr => l128 | r128, BinOp::BitOr => l128 | r128,
@ -635,16 +714,16 @@ impl Evaluator<'_> {
let r = r.to_le_bytes(); let r = r.to_le_bytes();
for &k in &r[lc.len()..] { for &k in &r[lc.len()..] {
if k != 0 && (k != 255 || !is_signed) { if k != 0 && (k != 255 || !is_signed) {
return Err(MirEvalError::Panic); return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
} }
} }
Owned(r[0..lc.len()].into()) Owned(r[0..lc.len()].into())
} }
BinOp::Shl | BinOp::Shr => { BinOp::Shl | BinOp::Shr => {
let shift_amout = if r128 < 0 { let shift_amout = if r128 < 0 {
return Err(MirEvalError::Panic); return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
} else if r128 > 128 { } else if r128 > 128 {
return Err(MirEvalError::Panic); return Err(MirEvalError::Panic(format!("Overflow in {op:?}")));
} else { } else {
r128 as u8 r128 as u8
}; };
@ -720,47 +799,54 @@ impl Evaluator<'_> {
} }
Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"), Rvalue::ShallowInitBox(_, _) => not_supported!("shallow init box"),
Rvalue::CopyForDeref(_) => not_supported!("copy for deref"), Rvalue::CopyForDeref(_) => not_supported!("copy for deref"),
Rvalue::Aggregate(kind, values) => match kind { Rvalue::Aggregate(kind, values) => {
AggregateKind::Array(_) => { let values = values
let mut r = vec![]; .iter()
for x in values { .map(|x| self.eval_operand(x, locals))
let value = self.eval_operand(x, locals)?.get(&self)?; .collect::<Result<Vec<_>>>()?;
r.extend(value); match kind {
AggregateKind::Array(_) => {
let mut r = vec![];
for x in values {
let value = x.get(&self)?;
r.extend(value);
}
Owned(r)
}
AggregateKind::Tuple(ty) => {
let layout = self.layout(&ty)?;
Owned(self.make_by_layout(
layout.size.bytes_usize(),
&layout,
None,
values.iter().copied(),
)?)
}
AggregateKind::Union(x, f) => {
let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?;
let offset = layout
.fields
.offset(u32::from(f.local_id.into_raw()) as usize)
.bytes_usize();
let op = values[0].get(&self)?;
let mut result = vec![0; layout.size.bytes_usize()];
result[offset..offset + op.len()].copy_from_slice(op);
Owned(result)
}
AggregateKind::Adt(x, subst) => {
let subst = self.subst_filler(subst, locals);
let (size, variant_layout, tag) =
self.layout_of_variant(*x, subst, locals)?;
Owned(self.make_by_layout(
size,
&variant_layout,
tag,
values.iter().copied(),
)?)
} }
Owned(r)
} }
AggregateKind::Tuple(ty) => { }
let layout = self.layout(&ty)?;
Owned(self.make_by_layout(
layout.size.bytes_usize(),
&layout,
None,
values,
locals,
)?)
}
AggregateKind::Union(x, f) => {
let layout = self.layout_adt((*x).into(), Substitution::empty(Interner))?;
let offset = layout
.fields
.offset(u32::from(f.local_id.into_raw()) as usize)
.bytes_usize();
let op = self.eval_operand(&values[0], locals)?.get(&self)?;
let mut result = vec![0; layout.size.bytes_usize()];
result[offset..offset + op.len()].copy_from_slice(op);
Owned(result)
}
AggregateKind::Adt(x, subst) => {
let subst = self.subst_filler(subst, locals);
let (size, variant_layout, tag) = self.layout_of_variant(*x, subst, locals)?;
Owned(self.make_by_layout(size, &variant_layout, tag, values, locals)?)
}
},
Rvalue::Cast(kind, operand, target_ty) => match kind { Rvalue::Cast(kind, operand, target_ty) => match kind {
CastKind::PointerExposeAddress => not_supported!("exposing pointer address"),
CastKind::PointerFromExposedAddress => {
not_supported!("creating pointer from exposed address")
}
CastKind::Pointer(cast) => match cast { CastKind::Pointer(cast) => match cast {
PointerCast::ReifyFnPointer => { PointerCast::ReifyFnPointer => {
let current_ty = self.operand_ty(operand, locals)?; let current_ty = self.operand_ty(operand, locals)?;
@ -818,7 +904,9 @@ impl Evaluator<'_> {
x => not_supported!("pointer cast {x:?}"), x => not_supported!("pointer cast {x:?}"),
}, },
CastKind::DynStar => not_supported!("dyn star cast"), CastKind::DynStar => not_supported!("dyn star cast"),
CastKind::IntToInt => { CastKind::IntToInt
| CastKind::PointerExposeAddress
| CastKind::PointerFromExposedAddress => {
// FIXME: handle signed cast // FIXME: handle signed cast
let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, false); let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, false);
let dest_size = let dest_size =
@ -828,7 +916,12 @@ impl Evaluator<'_> {
CastKind::FloatToInt => not_supported!("float to int cast"), CastKind::FloatToInt => not_supported!("float to int cast"),
CastKind::FloatToFloat => not_supported!("float to float cast"), CastKind::FloatToFloat => not_supported!("float to float cast"),
CastKind::IntToFloat => not_supported!("float to int cast"), CastKind::IntToFloat => not_supported!("float to int cast"),
CastKind::PtrToPtr => not_supported!("ptr to ptr cast"), CastKind::PtrToPtr => {
let current = pad16(self.eval_operand(operand, locals)?.get(&self)?, false);
let dest_size =
self.size_of_sized(target_ty, locals, "destination of ptr to ptr cast")?;
Owned(current[0..dest_size].to_vec())
}
CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"), CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),
}, },
}) })
@ -895,16 +988,15 @@ impl Evaluator<'_> {
size: usize, // Not neccessarily equal to variant_layout.size size: usize, // Not neccessarily equal to variant_layout.size
variant_layout: &Layout, variant_layout: &Layout,
tag: Option<(usize, usize, i128)>, tag: Option<(usize, usize, i128)>,
values: &[Operand], values: impl Iterator<Item = Interval>,
locals: &Locals<'_>,
) -> Result<Vec<u8>> { ) -> Result<Vec<u8>> {
let mut result = vec![0; size]; let mut result = vec![0; size];
if let Some((offset, size, value)) = tag { if let Some((offset, size, value)) = tag {
result[offset..offset + size].copy_from_slice(&value.to_le_bytes()[0..size]); result[offset..offset + size].copy_from_slice(&value.to_le_bytes()[0..size]);
} }
for (i, op) in values.iter().enumerate() { for (i, op) in values.enumerate() {
let offset = variant_layout.fields.offset(i).bytes_usize(); let offset = variant_layout.fields.offset(i).bytes_usize();
let op = self.eval_operand(op, locals)?.get(&self)?; let op = op.get(&self)?;
result[offset..offset + op.len()].copy_from_slice(op); result[offset..offset + op.len()].copy_from_slice(op);
} }
Ok(result) Ok(result)
@ -1196,28 +1288,89 @@ impl Evaluator<'_> {
} }
fn exec_intrinsic( fn exec_intrinsic(
&self, &mut self,
as_str: &str, as_str: &str,
mut arg_bytes: impl Iterator<Item = Vec<u8>>, args: &[IntervalAndTy],
generic_args: Substitution, generic_args: Substitution,
destination: Interval,
locals: &Locals<'_>, locals: &Locals<'_>,
) -> Result<Vec<u8>> { ) -> Result<()> {
match as_str { match as_str {
"size_of" => { "size_of" => {
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else { let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("size_of generic arg is not provided")); return Err(MirEvalError::TypeError("size_of generic arg is not provided"));
}; };
let size = self.size_of(ty, locals)?; let size = self.size_of_sized(ty, locals, "size_of arg")?;
match size { destination.write_from_bytes(self, &size.to_le_bytes()[0..destination.size])
Some(x) => Ok(x.to_le_bytes().to_vec()), }
None => return Err(MirEvalError::TypeError("size_of arg is unsized")), "wrapping_add" => {
} let [lhs, rhs] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
};
let lhs = u128::from_le_bytes(pad16(lhs.get(self)?, false));
let rhs = u128::from_le_bytes(pad16(rhs.get(self)?, false));
let ans = lhs.wrapping_add(rhs);
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"copy" | "copy_nonoverlapping" => {
let [src, dst, offset] = args else {
return Err(MirEvalError::TypeError("copy_nonoverlapping args are not provided"));
};
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("copy_nonoverlapping generic arg is not provided"));
};
let src = Address::from_bytes(src.get(self)?)?;
let dst = Address::from_bytes(dst.get(self)?)?;
let offset = from_bytes!(usize, offset.get(self)?);
let size = self.size_of_sized(ty, locals, "copy_nonoverlapping ptr type")?;
let size = offset * size;
let src = Interval { addr: src, size };
let dst = Interval { addr: dst, size };
dst.write_from_interval(self, src)
}
"offset" | "arith_offset" => {
let [ptr, offset] = args else {
return Err(MirEvalError::TypeError("offset args are not provided"));
};
let Some(ty) = generic_args.as_slice(Interner).get(0).and_then(|x| x.ty(Interner)) else {
return Err(MirEvalError::TypeError("offset generic arg is not provided"));
};
let ptr = u128::from_le_bytes(pad16(ptr.get(self)?, false));
let offset = u128::from_le_bytes(pad16(offset.get(self)?, false));
let size = self.size_of_sized(ty, locals, "offset ptr type")? as u128;
let ans = ptr + offset * size;
destination.write_from_bytes(self, &ans.to_le_bytes()[0..destination.size])
}
"assert_inhabited" | "assert_zero_valid" | "assert_uninit_valid" => {
// FIXME: We should actually implement these checks
Ok(())
}
"forget" => {
// We don't call any drop glue yet, so there is nothing here
Ok(())
} }
"transmute" => { "transmute" => {
let Some(arg) = arg_bytes.next() else { let [arg] = args else {
return Err(MirEvalError::TypeError("trasmute arg is not provided")); return Err(MirEvalError::TypeError("trasmute arg is not provided"));
}; };
Ok(arg) destination.write_from_interval(self, arg.interval)
}
"const_eval_select" => {
let [tuple, const_fn, _] = args else {
return Err(MirEvalError::TypeError("const_eval_select args are not provided"));
};
let mut args = vec![const_fn.clone()];
let TyKind::Tuple(_, fields) = tuple.ty.kind(Interner) else {
return Err(MirEvalError::TypeError("const_eval_select arg[0] is not a tuple"));
};
let layout = self.layout(&tuple.ty)?;
for (i, field) in fields.iter(Interner).enumerate() {
let field = field.assert_ty_ref(Interner).clone();
let offset = layout.fields.offset(i).bytes_usize();
let addr = tuple.interval.addr.offset(offset);
args.push(IntervalAndTy::new(addr, field, self, locals)?);
}
self.exec_fn_trait(&args, destination, locals)
} }
_ => not_supported!("unknown intrinsic {as_str}"), _ => not_supported!("unknown intrinsic {as_str}"),
} }
@ -1226,8 +1379,8 @@ impl Evaluator<'_> {
fn exec_fn_pointer( fn exec_fn_pointer(
&mut self, &mut self,
bytes: Interval, bytes: Interval,
destination: &Place, destination: Interval,
args: &[Operand], args: &[IntervalAndTy],
locals: &Locals<'_>, locals: &Locals<'_>,
) -> Result<()> { ) -> Result<()> {
let id = from_bytes!(usize, bytes.get(self)?); let id = from_bytes!(usize, bytes.get(self)?);
@ -1244,38 +1397,41 @@ impl Evaluator<'_> {
&mut self, &mut self,
def: FnDefId, def: FnDefId,
generic_args: &Substitution, generic_args: &Substitution,
destination: &Place, destination: Interval,
args: &[Operand], args: &[IntervalAndTy],
locals: &Locals<'_>, locals: &Locals<'_>,
) -> Result<()> { ) -> Result<()> {
let def: CallableDefId = from_chalk(self.db, def); let def: CallableDefId = from_chalk(self.db, def);
let generic_args = self.subst_filler(generic_args, &locals); let generic_args = self.subst_filler(generic_args, &locals);
match def { match def {
CallableDefId::FunctionId(def) => { CallableDefId::FunctionId(def) => {
let dest_addr = self.place_addr(destination, &locals)?; if let Some(_) = self.detect_fn_trait(def) {
if let Some(x) = self.detect_fn_trait(def) { self.exec_fn_trait(&args, destination, locals)?;
self.exec_fn_trait(x, &args, destination, locals)?;
return Ok(()); return Ok(());
} }
let arg_bytes = args self.exec_fn_with_args(def, args, generic_args, locals, destination)?;
.iter()
.map(|x| Ok(self.eval_operand(x, &locals)?.get(&self)?.to_owned()))
.collect::<Result<Vec<_>>>()?;
self.exec_fn_with_args(def, arg_bytes, generic_args, locals, dest_addr)?;
} }
CallableDefId::StructId(id) => { CallableDefId::StructId(id) => {
let (size, variant_layout, tag) = let (size, variant_layout, tag) =
self.layout_of_variant(id.into(), generic_args.clone(), &locals)?; self.layout_of_variant(id.into(), generic_args.clone(), &locals)?;
let result = self.make_by_layout(size, &variant_layout, tag, args, &locals)?; let result = self.make_by_layout(
let dest_addr = self.place_addr(destination, &locals)?; size,
self.write_memory(dest_addr, &result)?; &variant_layout,
tag,
args.iter().map(|x| x.interval),
)?;
destination.write_from_bytes(self, &result)?;
} }
CallableDefId::EnumVariantId(id) => { CallableDefId::EnumVariantId(id) => {
let (size, variant_layout, tag) = let (size, variant_layout, tag) =
self.layout_of_variant(id.into(), generic_args.clone(), &locals)?; self.layout_of_variant(id.into(), generic_args.clone(), &locals)?;
let result = self.make_by_layout(size, &variant_layout, tag, args, &locals)?; let result = self.make_by_layout(
let dest_addr = self.place_addr(destination, &locals)?; size,
self.write_memory(dest_addr, &result)?; &variant_layout,
tag,
args.iter().map(|x| x.interval),
)?;
destination.write_from_bytes(self, &result)?;
} }
} }
Ok(()) Ok(())
@ -1284,10 +1440,10 @@ impl Evaluator<'_> {
fn exec_fn_with_args( fn exec_fn_with_args(
&mut self, &mut self,
def: FunctionId, def: FunctionId,
arg_bytes: Vec<Vec<u8>>, args: &[IntervalAndTy],
generic_args: Substitution, generic_args: Substitution,
locals: &Locals<'_>, locals: &Locals<'_>,
dest_addr: Address, destination: Interval,
) -> Result<()> { ) -> Result<()> {
let function_data = self.db.function_data(def); let function_data = self.db.function_data(def);
let is_intrinsic = match &function_data.abi { let is_intrinsic = match &function_data.abi {
@ -1301,14 +1457,18 @@ impl Evaluator<'_> {
_ => false, _ => false,
}, },
}; };
let result = if is_intrinsic { if is_intrinsic {
self.exec_intrinsic( return self.exec_intrinsic(
function_data.name.as_text().unwrap_or_default().as_str(), function_data.name.as_text().unwrap_or_default().as_str(),
arg_bytes.iter().cloned(), args,
generic_args, generic_args,
destination,
&locals, &locals,
)? );
} else if let Some(x) = self.detect_lang_function(def) { }
let arg_bytes =
args.iter().map(|x| Ok(x.get(&self)?.to_owned())).collect::<Result<Vec<_>>>()?;
let result = if let Some(x) = self.detect_lang_function(def) {
self.exec_lang_item(x, &arg_bytes)? self.exec_lang_item(x, &arg_bytes)?
} else { } else {
if let Some(self_ty_idx) = if let Some(self_ty_idx) =
@ -1321,9 +1481,12 @@ impl Evaluator<'_> {
let ty = self let ty = self
.vtable_map .vtable_map
.ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?; .ty_of_bytes(&arg_bytes[0][self.ptr_size()..self.ptr_size() * 2])?;
let mut args_for_target = args.to_vec();
args_for_target[0] = IntervalAndTy {
interval: args_for_target[0].interval.slice(0..self.ptr_size()),
ty: ty.clone(),
};
let ty = GenericArgData::Ty(ty.clone()).intern(Interner); let ty = GenericArgData::Ty(ty.clone()).intern(Interner);
let mut args_for_target = arg_bytes;
args_for_target[0] = args_for_target[0][0..self.ptr_size()].to_vec();
let generics_for_target = Substitution::from_iter( let generics_for_target = Substitution::from_iter(
Interner, Interner,
generic_args.iter(Interner).enumerate().map(|(i, x)| { generic_args.iter(Interner).enumerate().map(|(i, x)| {
@ -1336,10 +1499,10 @@ impl Evaluator<'_> {
); );
return self.exec_fn_with_args( return self.exec_fn_with_args(
def, def,
args_for_target, &args_for_target,
generics_for_target, generics_for_target,
locals, locals,
dest_addr, destination,
); );
} }
let (imp, generic_args) = let (imp, generic_args) =
@ -1351,20 +1514,19 @@ impl Evaluator<'_> {
self.interpret_mir(&mir_body, arg_bytes.iter().cloned(), generic_args) self.interpret_mir(&mir_body, arg_bytes.iter().cloned(), generic_args)
.map_err(|e| MirEvalError::InFunction(imp, Box::new(e)))? .map_err(|e| MirEvalError::InFunction(imp, Box::new(e)))?
}; };
self.write_memory(dest_addr, &result)?; destination.write_from_bytes(self, &result)?;
Ok(()) Ok(())
} }
fn exec_fn_trait( fn exec_fn_trait(
&mut self, &mut self,
ft: FnTrait, args: &[IntervalAndTy],
args: &[Operand], destination: Interval,
destination: &Place,
locals: &Locals<'_>, locals: &Locals<'_>,
) -> Result<()> { ) -> Result<()> {
let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?; let func = args.get(0).ok_or(MirEvalError::TypeError("fn trait with no arg"))?;
let mut func_ty = self.operand_ty(func, locals)?; let mut func_ty = func.ty.clone();
let mut func_data = self.eval_operand(func, locals)?; let mut func_data = func.interval;
while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) { while let TyKind::Ref(_, _, z) = func_ty.kind(Interner) {
func_ty = z.clone(); func_ty = z.clone();
if matches!(func_ty.kind(Interner), TyKind::Dyn(_)) { if matches!(func_ty.kind(Interner), TyKind::Dyn(_)) {
@ -1383,7 +1545,7 @@ impl Evaluator<'_> {
TyKind::Function(_) => { TyKind::Function(_) => {
self.exec_fn_pointer(func_data, destination, &args[1..], locals)?; self.exec_fn_pointer(func_data, destination, &args[1..], locals)?;
} }
x => not_supported!("Call {ft:?} trait methods with type {x:?}"), x => not_supported!("Call FnTrait methods with type {x:?}"),
} }
Ok(()) Ok(())
} }
@ -1392,7 +1554,10 @@ impl Evaluator<'_> {
use LangItem::*; use LangItem::*;
let mut args = args.iter(); let mut args = args.iter();
match x { match x {
PanicFmt | BeginPanic => Err(MirEvalError::Panic), // FIXME: we want to find the panic message from arguments, but it wouldn't work
// currently even if we do that, since macro expansion of panic related macros
// is dummy.
PanicFmt | BeginPanic => Err(MirEvalError::Panic("<format-args>".to_string())),
SliceLen => { SliceLen => {
let arg = args let arg = args
.next() .next()

View File

@ -1285,6 +1285,11 @@ fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
(_, chalk_ir::Scalar::Float(_)) => CastKind::IntToFloat, (_, chalk_ir::Scalar::Float(_)) => CastKind::IntToFloat,
(_, _) => CastKind::IntToInt, (_, _) => CastKind::IntToInt,
}, },
(TyKind::Scalar(_), TyKind::Raw(..)) => CastKind::PointerFromExposedAddress,
(TyKind::Raw(..), TyKind::Scalar(_)) => CastKind::PointerExposeAddress,
(TyKind::Raw(..) | TyKind::Ref(..), TyKind::Raw(..) | TyKind::Ref(..)) => {
CastKind::PtrToPtr
}
// Enum to int casts // Enum to int casts
(TyKind::Scalar(_), TyKind::Adt(..)) | (TyKind::Adt(..), TyKind::Scalar(_)) => { (TyKind::Scalar(_), TyKind::Adt(..)) | (TyKind::Adt(..), TyKind::Scalar(_)) => {
CastKind::IntToInt CastKind::IntToInt

View File

@ -2696,6 +2696,21 @@ fn f() {
) )
} }
// Inference test for `&mut T as *mut _`: the cast target's pointee must be
// unified with the reference's pointee, so `_` resolves to `S` and the
// annotated expression types as `*mut S`.
#[test]
fn infer_ref_to_raw_cast() {
check_types(
r#"
struct S;
fn f() {
let s = &mut S;
let s = s as *mut _;
//^ *mut S
}
"#,
);
}
#[test] #[test]
fn infer_missing_type() { fn infer_missing_type() {
check_types( check_types(
@ -3258,25 +3273,6 @@ fn f<T>(t: Ark<T>) {
); );
} }
// FIXME
#[test]
fn castable_to2() {
check_infer(
r#"
fn func() {
let x = &0u32 as *const _;
}
"#,
expect![[r#"
10..44 '{ ...t _; }': ()
20..21 'x': *const {unknown}
24..29 '&0u32': &u32
24..41 '&0u32 ...onst _': *const {unknown}
25..29 '0u32': u32
"#]],
);
}
#[test] #[test]
fn issue_14275() { fn issue_14275() {
// FIXME: evaluate const generic // FIXME: evaluate const generic

View File

@ -409,7 +409,6 @@ pub(super) fn definition(
} }
match it.eval(db) { match it.eval(db) {
Ok(()) => Some("pass".into()), Ok(()) => Some("pass".into()),
Err(MirEvalError::Panic) => Some("fail".into()),
Err(MirEvalError::MirLowerError(f, e)) => { Err(MirEvalError::MirLowerError(f, e)) => {
let name = &db.function_data(f).name; let name = &db.function_data(f).name;
Some(format!("error: fail to lower {name} due {e:?}")) Some(format!("error: fail to lower {name} due {e:?}"))