rustup: update to nightly-2021-08-10.
This commit is contained in:
parent 1a3a12f505
commit 7501f1895c
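Most of the hunks below apply one recurring pattern forced by the newer nightly: rustc's builder methods (`load`, `volatile_load`, `atomic_load`, `gep`, `inbounds_gep`, `struct_gep`, `call`, `invoke`) now receive an explicit pointee/callee type, so the SPIR-V backend threads that type through and cross-checks it, via `assert_ty_eq!`, against the pointee type it already tracks on the pointer value. The sketch below is a minimal, self-contained toy illustration of that pattern only; `TypeId`, `Ptr`, and `Builder` are made-up stand-ins, not rust-gpu's real `SpirvType`/`SpirvValue` machinery.

    // Toy model of the typed-builder-call pattern (hypothetical names).
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct TypeId(u32);

    struct Ptr {
        pointee: TypeId, // what the backend already knows this pointer points to
    }

    struct Builder;

    impl Builder {
        // Before the rustup, `load(ptr, align)` derived the result type from the
        // pointer alone; the new signature also receives the expected type and
        // asserts that it agrees with the tracked pointee (the role played by
        // `assert_ty_eq!` in the hunks below).
        fn load(&mut self, ty: TypeId, ptr: &Ptr) -> TypeId {
            assert_eq!(ty, ptr.pointee, "load type must match the tracked pointee");
            ptr.pointee
        }
    }

    fn main() {
        let mut bx = Builder;
        let f32_ty = TypeId(1);
        let ptr = Ptr { pointee: f32_ty };
        // Callers now supply the expected type explicitly.
        let loaded_ty = bx.load(f32_ty, &ptr);
        println!("loaded value has type {:?}", loaded_ty);
    }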
@@ -488,13 +488,8 @@ fn check_mod_attrs(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
         module_def_id,
         &mut check_spirv_attr_visitor.as_deep_visitor(),
     );
-    // FIXME(eddyb) use `tcx.hir().visit_exported_macros_in_krate(...)` after rustup.
-    for id in tcx.hir().krate().exported_macros {
-        check_spirv_attr_visitor.visit_macro_def(match tcx.hir().find(id.hir_id()) {
-            Some(hir::Node::MacroDef(macro_def)) => macro_def,
-            _ => unreachable!(),
-        });
-    }
+    tcx.hir()
+        .visit_exported_macros_in_krate(check_spirv_attr_visitor);
     check_invalid_macro_level_spirv_attr(
         tcx,
         &check_spirv_attr_visitor.sym,
@@ -1,4 +1,5 @@
 use super::Builder;
+use crate::abi::ConvSpirvType;
 use crate::builder_spirv::{BuilderCursor, SpirvConst, SpirvValue, SpirvValueExt, SpirvValueKind};
 use crate::spirv_type::SpirvType;
 use rspirv::dr::{InsertPoint, Instruction, Operand};
@@ -313,7 +314,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         } else {
             for index in 0..count {
                 let const_index = self.constant_u32(self.span(), index as u32);
-                let gep_ptr = self.gep(ptr, &[const_index]);
+                let gep_ptr = self.gep(pat.ty, ptr, &[const_index]);
                 self.store(pat, gep_ptr, Align::from_bytes(0).unwrap());
             }
         }
@@ -339,11 +340,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         self.store(zero, index, zero_align);
         self.br(header.llbb());

-        let current_index = header.load(index, zero_align);
+        let current_index = header.load(count.ty, index, zero_align);
         let cond = header.icmp(IntPredicate::IntULT, current_index, count);
         header.cond_br(cond, body.llbb(), exit.llbb());

-        let gep_ptr = body.gep(ptr, &[current_index]);
+        let gep_ptr = body.gep(pat.ty, ptr, &[current_index]);
         body.store(pat, gep_ptr, zero_align);
         let current_index_plus_1 = body.add(current_index, one);
         body.store(current_index_plus_1, index, zero_align);
@@ -623,6 +624,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {

     fn invoke(
         &mut self,
+        llty: Self::Type,
         llfn: Self::Value,
         args: &[Self::Value],
         then: Self::BasicBlock,
@@ -630,7 +632,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         funclet: Option<&Self::Funclet>,
     ) -> Self::Value {
         // Exceptions don't exist, jump directly to then block
-        let result = self.call(llfn, args, funclet);
+        let result = self.call(llty, llfn, args, funclet);
         self.emit().branch(then).unwrap();
         result
     }
@@ -842,12 +844,15 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         self.fatal("array alloca not supported yet")
     }

-    fn load(&mut self, ptr: Self::Value, _align: Align) -> Self::Value {
+    fn load(&mut self, ty: Self::Type, ptr: Self::Value, _align: Align) -> Self::Value {
         if let Some(value) = ptr.const_fold_load(self) {
             return value;
         }
         let ty = match self.lookup_type(ptr.ty) {
-            SpirvType::Pointer { pointee } => pointee,
+            SpirvType::Pointer { pointee } => {
+                assert_ty_eq!(self, ty, pointee);
+                pointee
+            }
             ty => self.fatal(&format!(
                 "load called on variable that wasn't a pointer: {:?}",
                 ty
@@ -859,16 +864,25 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
             .with_type(ty)
     }

-    fn volatile_load(&mut self, ptr: Self::Value) -> Self::Value {
+    fn volatile_load(&mut self, ty: Self::Type, ptr: Self::Value) -> Self::Value {
         // TODO: Implement this
-        let result = self.load(ptr, Align::from_bytes(0).unwrap());
+        let result = self.load(ty, ptr, Align::from_bytes(0).unwrap());
         self.zombie(result.def(self), "volatile load is not supported yet");
         result
     }

-    fn atomic_load(&mut self, ptr: Self::Value, order: AtomicOrdering, _size: Size) -> Self::Value {
+    fn atomic_load(
+        &mut self,
+        ty: Self::Type,
+        ptr: Self::Value,
+        order: AtomicOrdering,
+        _size: Size,
+    ) -> Self::Value {
         let ty = match self.lookup_type(ptr.ty) {
-            SpirvType::Pointer { pointee } => pointee,
+            SpirvType::Pointer { pointee } => {
+                assert_ty_eq!(self, ty, pointee);
+                pointee
+            }
             ty => self.fatal(&format!(
                 "atomic_load called on variable that wasn't a pointer: {:?}",
                 ty
@@ -903,14 +917,23 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         let val = if let Some(llextra) = place.llextra {
             OperandValue::Ref(place.llval, Some(llextra), place.align)
         } else if self.cx.is_backend_immediate(place.layout) {
-            let llval = self.load(place.llval, place.align);
+            let llval = self.load(
+                place.layout.spirv_type(self.span(), self),
+                place.llval,
+                place.align,
+            );
             OperandValue::Immediate(self.to_immediate(llval, place.layout))
         } else if let Abi::ScalarPair(ref a, ref b) = place.layout.abi {
             let b_offset = a.value.size(self).align_to(b.value.align(self).abi);

+            let pair_ty = place.layout.spirv_type(self.span(), self);
             let mut load = |i, scalar: &Scalar, align| {
-                let llptr = self.struct_gep(place.llval, i as u64);
-                let load = self.load(llptr, align);
+                let llptr = self.struct_gep(pair_ty, place.llval, i as u64);
+                let load = self.load(
+                    self.scalar_pair_element_backend_type(place.layout, i, false),
+                    llptr,
+                    align,
+                );
                 // WARN! This does not go through to_immediate due to only having a Scalar, not a Ty, but it still does
                 // whatever to_immediate does!
                 if scalar.is_bool() {
@@ -943,12 +966,12 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
         let zero = self.const_usize(0);
         let start = dest.project_index(&mut self, zero).llval;

-        let align = dest
-            .align
-            .restrict_for_offset(dest.layout.field(self.cx(), 0).size);
+        let elem_layout = dest.layout.field(self.cx(), 0);
+        let elem_ty = elem_layout.spirv_type(self.span(), &self);
+        let align = dest.align.restrict_for_offset(elem_layout.size);

         for i in 0..count {
-            let current = self.inbounds_gep(start, &[self.const_usize(i)]);
+            let current = self.inbounds_gep(elem_ty, start, &[self.const_usize(i)]);
             cg_elem.val.store(
                 &mut self,
                 PlaceRef::new_sized_aligned(current, cg_elem.layout, align),
@@ -1026,17 +1049,25 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
             .unwrap();
     }

-    fn gep(&mut self, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value {
-        self.gep_help(ptr, indices, false)
+    fn gep(&mut self, ty: Self::Type, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value {
+        self.gep_help(ty, ptr, indices, false)
     }

-    fn inbounds_gep(&mut self, ptr: Self::Value, indices: &[Self::Value]) -> Self::Value {
-        self.gep_help(ptr, indices, true)
+    fn inbounds_gep(
+        &mut self,
+        ty: Self::Type,
+        ptr: Self::Value,
+        indices: &[Self::Value],
+    ) -> Self::Value {
+        self.gep_help(ty, ptr, indices, true)
     }

-    fn struct_gep(&mut self, ptr: Self::Value, idx: u64) -> Self::Value {
+    fn struct_gep(&mut self, ty: Self::Type, ptr: Self::Value, idx: u64) -> Self::Value {
         let pointee = match self.lookup_type(ptr.ty) {
-            SpirvType::Pointer { pointee } => pointee,
+            SpirvType::Pointer { pointee } => {
+                assert_ty_eq!(self, ty, pointee);
+                pointee
+            }
             other => self.fatal(&format!(
                 "struct_gep not on pointer type: {:?}, index {}",
                 other, idx
@@ -2087,6 +2118,7 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {

     fn call(
         &mut self,
+        callee_ty: Self::Type,
         callee: Self::Value,
         args: &[Self::Value],
         funclet: Option<&Self::Funclet>,
@@ -2104,7 +2136,10 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
             SpirvType::Function {
                 return_type,
                 arguments,
-            } => (callee.def(self), return_type, arguments),
+            } => {
+                assert_ty_eq!(self, callee_ty, callee.ty);
+                (callee.def(self), return_type, arguments)
+            }

             SpirvType::Pointer { pointee } => match self.lookup_type(pointee) {
                 SpirvType::Function {
@@ -2112,7 +2147,10 @@ impl<'a, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'tcx> {
                     arguments,
                 } => (
                     match callee.kind {
-                        SpirvValueKind::FnAddr { function } => function,
+                        SpirvValueKind::FnAddr { function } => {
+                            assert_ty_eq!(self, callee_ty, pointee);
+                            function
+                        }

                         // Truly indirect call.
                         _ => {
@@ -99,15 +99,17 @@ impl<'a, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'tcx> {
             }

             sym::volatile_load | sym::unaligned_volatile_load => {
-                let tp_ty = substs.type_at(0);
-                let mut ptr = args[0].immediate();
+                let ptr = args[0].immediate();
                 if let PassMode::Cast(ty) = fn_abi.ret.mode {
                     let pointee = ty.spirv_type(self.span(), self);
                     let pointer = SpirvType::Pointer { pointee }.def(self.span(), self);
-                    ptr = self.pointercast(ptr, pointer);
+                    let ptr = self.pointercast(ptr, pointer);
+                    self.volatile_load(pointee, ptr)
+                } else {
+                    let layout = self.layout_of(substs.type_at(0));
+                    let load = self.volatile_load(layout.spirv_type(self.span(), self), ptr);
+                    self.to_immediate(load, layout)
                 }
-                let load = self.volatile_load(ptr);
-                self.to_immediate(load, self.layout_of(tp_ty))
             }

             sym::prefetch_read_data
@@ -95,6 +95,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {

     pub fn gep_help(
         &self,
+        ty: Word,
         ptr: SpirvValue,
         indices: &[SpirvValue],
         is_inbounds: bool,
@@ -105,7 +106,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         // https://github.com/gpuweb/gpuweb/issues/33
         let mut result_indices = Vec::with_capacity(indices.len() - 1);
         let mut result_pointee_type = match self.lookup_type(ptr.ty) {
-            SpirvType::Pointer { pointee } => pointee,
+            SpirvType::Pointer { pointee } => {
+                assert_ty_eq!(self, ty, pointee);
+                pointee
+            }
             other_type => self.fatal(&format!(
                 "GEP first deref not implemented for type {:?}",
                 other_type
@@ -11,6 +11,7 @@ use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_middle::bug;
 use rustc_span::symbol::Symbol;
 use rustc_span::{Span, DUMMY_SP};
+use std::assert_matches::assert_matches;
 use std::cell::{RefCell, RefMut};
 use std::rc::Rc;
 use std::{fs::File, io::Write, path::Path};
@@ -262,8 +262,9 @@ impl<'tcx> ConstMethods<'tcx> for CodegenCx<'tcx> {
                     }
                 }
             }
-            Scalar::Ptr(ptr) => {
-                let (base_addr, _base_addr_space) = match self.tcx.global_alloc(ptr.alloc_id) {
+            Scalar::Ptr(ptr, _) => {
+                let (alloc_id, offset) = ptr.into_parts();
+                let (base_addr, _base_addr_space) = match self.tcx.global_alloc(alloc_id) {
                     GlobalAlloc::Memory(alloc) => {
                         let pointee = match self.lookup_type(ty) {
                             SpirvType::Pointer { pointee } => pointee,
@@ -286,12 +287,12 @@ impl<'tcx> ConstMethods<'tcx> for CodegenCx<'tcx> {
                         (self.get_static(def_id), AddressSpace::DATA)
                     }
                 };
-                let value = if ptr.offset.bytes() == 0 {
+                let value = if offset.bytes() == 0 {
                     base_addr
                 } else {
                     self.tcx
                         .sess
-                        .fatal("Non-constant scalar_to_backend ptr.offset not supported")
+                        .fatal("Non-zero scalar_to_backend ptr.offset not supported")
                     // let offset = self.constant_u64(ptr.offset.bytes());
                     // self.gep(base_addr, once(offset))
                 };
@@ -307,6 +308,13 @@ impl<'tcx> ConstMethods<'tcx> for CodegenCx<'tcx> {
             }
         }
     }
+    // FIXME(eddyb) this shouldn't exist, and is only used by vtable creation,
+    // see https://github.com/rust-lang/rust/pull/86475#discussion_r680792727.
+    fn const_data_from_alloc(&self, _alloc: &Allocation) -> Self::Value {
+        let undef = self.undef(SpirvType::Void.def(DUMMY_SP, self));
+        self.zombie_no_span(undef.def_cx(self), "const_data_from_alloc");
+        undef
+    }
     fn from_const_alloc(
         &self,
         layout: TyAndLayout<'tcx>,
@@ -143,7 +143,7 @@ impl<'tcx> CodegenCx<'tcx> {
             );
         }
         bx.set_span(span);
-        bx.call(entry_func, &call_args, None);
+        bx.call(entry_func.ty, entry_func, &call_args, None);
         bx.ret_void();

         let stub_fn_id = stub_fn.def_cx(self);
@@ -307,14 +307,13 @@ impl<'tcx> CodegenCx<'tcx> {
         let var_ptr_spirv_type;
         let (value_ptr, value_len) = match storage_class {
             StorageClass::PushConstant | StorageClass::Uniform | StorageClass::StorageBuffer => {
-                var_ptr_spirv_type = self.type_ptr_to(
-                    SpirvType::InterfaceBlock {
-                        inner_type: value_spirv_type,
-                    }
-                    .def(hir_param.span, self),
-                );
+                let var_spirv_type = SpirvType::InterfaceBlock {
+                    inner_type: value_spirv_type,
+                }
+                .def(hir_param.span, self);
+                var_ptr_spirv_type = self.type_ptr_to(var_spirv_type);

-                let value_ptr = bx.struct_gep(var.with_type(var_ptr_spirv_type), 0);
+                let value_ptr = bx.struct_gep(var_spirv_type, var.with_type(var_ptr_spirv_type), 0);

                 let value_len = if is_unsized_with_len {
                     match self.lookup_type(value_spirv_type) {
@@ -409,7 +408,11 @@ impl<'tcx> CodegenCx<'tcx> {

         call_args.push(match entry_arg_abi.mode {
             PassMode::Indirect { .. } => value_ptr,
-            PassMode::Direct(_) => bx.load(value_ptr, entry_arg_abi.layout.align.abi),
+            PassMode::Direct(_) => bx.load(
+                entry_arg_abi.layout.spirv_type(hir_param.ty_span, bx),
+                value_ptr,
+                entry_arg_abi.layout.align.abi,
+            ),
             _ => unreachable!(),
         });
         assert_eq!(value_len, None);
@@ -543,7 +543,7 @@ impl<'tcx> MiscMethods<'tcx> for CodegenCx<'tcx> {
         todo!()
     }

-    fn set_frame_pointer_elimination(&self, _llfn: Self::Function) {
+    fn set_frame_pointer_type(&self, _llfn: Self::Function) {
         todo!()
     }

@@ -41,10 +41,14 @@ impl<'tcx> LayoutTypeMethods<'tcx> for CodegenCx<'tcx> {
         ty.spirv_type(DUMMY_SP, self)
     }

-    fn fn_ptr_backend_type(&self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Self::Type {
+    fn fn_decl_backend_type(&self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Self::Type {
         fn_abi.spirv_type(DUMMY_SP, self)
     }

+    fn fn_ptr_backend_type(&self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Self::Type {
+        self.type_ptr_to(self.fn_decl_backend_type(fn_abi))
+    }
+
     fn reg_backend_type(&self, ty: &Reg) -> Self::Type {
         ty.spirv_type(DUMMY_SP, self)
     }
@@ -367,8 +367,7 @@ fn add_upstream_rust_crates(
         .iter()
         .find(|(ty, _)| *ty == crate_type)
         .expect("failed to find crate type in dependency format list");
-    let deps = &codegen_results.crate_info.used_crates_dynamic;
-    for &(cnum, _) in deps.iter() {
+    for &cnum in &codegen_results.crate_info.used_crates {
         let src = &codegen_results.crate_info.used_crate_source[&cnum];
         match data[cnum.as_usize() - 1] {
             Linkage::NotLinked | Linkage::IncludedFromDylib => {}
@@ -391,8 +390,7 @@ fn add_upstream_native_libraries(
         .find(|(ty, _)| *ty == crate_type)
         .expect("failed to find crate type in dependency format list");

-    let crates = &codegen_results.crate_info.used_crates_static;
-    for &(cnum, _) in crates {
+    for &cnum in &codegen_results.crate_info.used_crates {
         for lib in codegen_results.crate_info.native_libraries[&cnum].iter() {
             let name = match lib.name {
                 Some(l) => l,
@@ -5,5 +5,5 @@
 # to the user in the error, instead of "error: invalid channel name '[toolchain]'".

 [toolchain]
-channel = "nightly-2021-06-09"
+channel = "nightly-2021-08-10"
 components = ["rust-src", "rustc-dev", "llvm-tools-preview"]
tests/ui/dis/ptr_copy.normal.stderr (new file, 8 lines)
@@ -0,0 +1,8 @@
+error: Cannot memcpy dynamically sized data
+  --> $CORE_SRC/intrinsics.rs:2132:14
+   |
+2132 |     unsafe { copy(src, dst, count) }
+   |              ^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
@@ -1,10 +1,32 @@
-// build-pass
+// revisions: normal via_intrinsic
+// [normal] build-fail
+// normalize-stderr-test "\S*/library/core/src/" -> "$$CORE_SRC/"
+// [via_intrinsic] build-pass
 // compile-flags: -C llvm-args=--disassemble-fn=ptr_copy::copy_via_raw_ptr

+#![cfg_attr(via_intrinsic, feature(intrinsics))]
+
 use spirv_std as _;

 fn copy_via_raw_ptr(src: &f32, dst: &mut f32) {
-    unsafe { core::ptr::copy(src, dst, 1) }
+    #[cfg(via_intrinsic)]
+    {
+        extern "rust-intrinsic" {
+            fn copy<T>(src: *const T, dst: *mut T, count: usize);
+        }
+        unsafe { copy(src, dst, 1) }
+    }
+
+    #[cfg(normal)]
+    {
+        // FIXME(eddyb) `ptr::copy` doesn't currently work, and so the test uses
+        // the intrinsic for success and `ptr::copy` for failure, so that it can
+        // be switched back to `ptr::copy`-only when that starts succeeding,
+        // likely once https://github.com/rust-lang/rust/pull/86003 is reverted
+        // (and/or https://github.com/rust-lang/rust/pull/81238 is reattempted),
+        // which is blocked on https://github.com/rust-lang/rust/pull/86699.
+        unsafe { core::ptr::copy(src, dst, 1) }
+    }
 }
 #[spirv(fragment)]
 pub fn main(i: f32, o: &mut f32) {
@@ -2,8 +2,8 @@
 %4 = OpFunctionParameter %5
 %6 = OpFunctionParameter %5
 %7 = OpLabel
-OpLine %8 7 13
+OpLine %8 17 17
 OpCopyMemory %6 %4
-OpLine %8 8 1
+OpLine %8 30 1
 OpReturn
 OpFunctionEnd
@@ -5,9 +5,9 @@
 %8 = OpVariable %5 Function
 OpLine %9 319 5
 OpStore %8 %10
-OpLine %11 696 8
+OpLine %11 703 8
 OpCopyMemory %8 %4
-OpLine %11 697 8
+OpLine %11 704 8
 %12 = OpLoad %13 %8
 OpLine %14 7 13
 OpStore %6 %12
@@ -5,9 +5,9 @@
 %8 = OpVariable %5 Function
 OpLine %9 319 5
 OpStore %8 %10
-OpLine %11 696 8
+OpLine %11 703 8
 OpCopyMemory %8 %4
-OpLine %11 697 8
+OpLine %11 704 8
 %12 = OpLoad %13 %8
 OpLine %14 7 13
 OpStore %6 %12
@@ -7,7 +7,7 @@ OpLine %9 7 35
 %10 = OpLoad %11 %4
 OpLine %9 7 13
 OpStore %8 %10
-OpLine %12 880 8
+OpLine %12 894 8
 OpCopyMemory %6 %8
 OpLine %9 8 1
 OpReturn
@@ -5,9 +5,9 @@
 %8 = OpVariable %5 Function
 OpLine %9 7 37
 %10 = OpLoad %11 %4
-OpLine %12 1012 17
+OpLine %12 1013 17
 OpStore %8 %10
-OpLine %13 880 8
+OpLine %13 894 8
 OpCopyMemory %6 %8
 OpLine %9 8 1
 OpReturn
@@ -5,7 +5,7 @@

 #![feature(
     extern_types,
-    min_type_alias_impl_trait,
+    type_alias_impl_trait, // HACK(eddyb) this comment prevents rustfmt
     stmt_expr_attributes,
     trait_alias
 )]