use crate::maybe_pqp_cg_ssa as rustc_codegen_ssa;

use super::CodegenCx;
use crate::abi::ConvSpirvType;
use crate::builder_spirv::{SpirvConst, SpirvValue, SpirvValueExt, SpirvValueKind};
use crate::spirv_type::SpirvType;
use itertools::Itertools as _;
use rspirv::spirv::Word;
use rustc_abi::{self as abi, AddressSpace, Float, HasDataLayout, Integer, Primitive, Size};
use rustc_codegen_ssa::traits::{ConstCodegenMethods, MiscCodegenMethods, StaticCodegenMethods};
use rustc_middle::mir::interpret::{AllocError, ConstAllocation, GlobalAlloc, Scalar, alloc_range};
use rustc_middle::ty::layout::LayoutOf;
use rustc_span::{DUMMY_SP, Span};

impl<'tcx> CodegenCx<'tcx> {
    pub fn def_constant(&self, ty: Word, val: SpirvConst<'_, 'tcx>) -> SpirvValue {
        self.builder.def_constant_cx(ty, val, self)
    }

    pub fn constant_u8(&self, span: Span, val: u8) -> SpirvValue {
        self.constant_int_from_native_unsigned(span, val)
    }

    pub fn constant_i8(&self, span: Span, val: i8) -> SpirvValue {
        self.constant_int_from_native_signed(span, val)
    }

    pub fn constant_i16(&self, span: Span, val: i16) -> SpirvValue {
        self.constant_int_from_native_signed(span, val)
    }

    pub fn constant_u16(&self, span: Span, val: u16) -> SpirvValue {
        self.constant_int_from_native_unsigned(span, val)
    }

    pub fn constant_i32(&self, span: Span, val: i32) -> SpirvValue {
        self.constant_int_from_native_signed(span, val)
    }

    pub fn constant_u32(&self, span: Span, val: u32) -> SpirvValue {
        self.constant_int_from_native_unsigned(span, val)
    }

    pub fn constant_i64(&self, span: Span, val: i64) -> SpirvValue {
        self.constant_int_from_native_signed(span, val)
    }

    pub fn constant_u64(&self, span: Span, val: u64) -> SpirvValue {
        self.constant_int_from_native_unsigned(span, val)
    }

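    // Defines an integer constant whose SPIR-V type is derived from the width
    // (and signedness) of the native Rust integer type of `val` itself.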
    fn constant_int_from_native_unsigned(&self, span: Span, val: impl Into<u128>) -> SpirvValue {
        let size = Size::from_bytes(std::mem::size_of_val(&val));
        let ty = SpirvType::Integer(size.bits() as u32, false).def(span, self);
        self.constant_int(ty, val.into())
    }

    fn constant_int_from_native_signed(&self, span: Span, val: impl Into<i128>) -> SpirvValue {
        let size = Size::from_bytes(std::mem::size_of_val(&val));
        let ty = SpirvType::Integer(size.bits() as u32, true).def(span, self);
        self.constant_int(ty, val.into() as u128)
    }

    pub fn constant_int(&self, ty: Word, val: u128) -> SpirvValue {
        self.def_constant(ty, SpirvConst::Scalar(val))
    }

    pub fn constant_f32(&self, span: Span, val: f32) -> SpirvValue {
        let ty = SpirvType::Float(32).def(span, self);
        self.def_constant(ty, SpirvConst::Scalar(val.to_bits().into()))
    }

    pub fn constant_f64(&self, span: Span, val: f64) -> SpirvValue {
        let ty = SpirvType::Float(64).def(span, self);
        self.def_constant(ty, SpirvConst::Scalar(val.to_bits().into()))
    }

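    // Defines a constant for an existing SPIR-V float type, storing the value
    // as its raw bit pattern (after converting `val` to `f32` for 32-bit
    // float types).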
    pub fn constant_float(&self, ty: Word, val: f64) -> SpirvValue {
        match self.lookup_type(ty) {
            SpirvType::Float(32) => {
                self.def_constant(ty, SpirvConst::Scalar((val as f32).to_bits().into()))
            }
            SpirvType::Float(64) => self.def_constant(ty, SpirvConst::Scalar(val.to_bits().into())),
            other => self.tcx.dcx().fatal(format!(
                "constant_float does not support type {}",
                other.debug(ty, self)
            )),
        }
    }

    pub fn constant_bool(&self, span: Span, val: bool) -> SpirvValue {
        let ty = SpirvType::Bool.def(span, self);
        self.def_constant(ty, SpirvConst::Scalar(val as u128))
    }

    pub fn constant_composite(&self, ty: Word, fields: impl Iterator<Item = Word>) -> SpirvValue {
        self.def_constant(ty, SpirvConst::Composite(&fields.collect::<Vec<_>>()))
    }

    pub fn constant_null(&self, ty: Word) -> SpirvValue {
        self.def_constant(ty, SpirvConst::Null)
    }

    pub fn undef(&self, ty: Word) -> SpirvValue {
        self.def_constant(ty, SpirvConst::Undef)
    }
}

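// `ConstCodegenMethods` is the constant-building interface `rustc_codegen_ssa`
// calls into; most methods forward to the inherent helpers above, using
// `DUMMY_SP` where no source span is available.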
impl ConstCodegenMethods for CodegenCx<'_> {
    fn const_null(&self, t: Self::Type) -> Self::Value {
        self.constant_null(t)
    }
    fn const_undef(&self, ty: Self::Type) -> Self::Value {
        self.undef(ty)
    }
    fn const_poison(&self, ty: Self::Type) -> Self::Value {
        self.const_undef(ty)
    }
    fn const_int(&self, t: Self::Type, i: i64) -> Self::Value {
        self.constant_int(t, i as u128)
    }
    fn const_uint(&self, t: Self::Type, i: u64) -> Self::Value {
        self.constant_int(t, i.into())
    }
    fn const_uint_big(&self, t: Self::Type, i: u128) -> Self::Value {
        self.constant_int(t, i)
    }
    fn const_bool(&self, val: bool) -> Self::Value {
        self.constant_bool(DUMMY_SP, val)
    }
    fn const_i8(&self, i: i8) -> Self::Value {
        self.constant_i8(DUMMY_SP, i)
    }
    fn const_i16(&self, i: i16) -> Self::Value {
        self.constant_i16(DUMMY_SP, i)
    }
    fn const_i32(&self, i: i32) -> Self::Value {
        self.constant_i32(DUMMY_SP, i)
    }
    fn const_u32(&self, i: u32) -> Self::Value {
        self.constant_u32(DUMMY_SP, i)
    }
    fn const_u64(&self, i: u64) -> Self::Value {
        self.constant_u64(DUMMY_SP, i)
    }
    fn const_u128(&self, i: u128) -> Self::Value {
        let ty = SpirvType::Integer(128, false).def(DUMMY_SP, self);
        self.const_uint_big(ty, i)
    }
    fn const_usize(&self, i: u64) -> Self::Value {
        let ptr_size = self.tcx.data_layout.pointer_size.bits() as u32;
        let t = SpirvType::Integer(ptr_size, false).def(DUMMY_SP, self);
        self.constant_int(t, i.into())
    }
    fn const_u8(&self, i: u8) -> Self::Value {
        self.constant_u8(DUMMY_SP, i)
    }
    fn const_real(&self, t: Self::Type, val: f64) -> Self::Value {
        self.constant_float(t, val)
    }

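    // A `&str` constant becomes a pointer to a byte composite, paired with
    // its length (mirroring Rust's "fat pointer" representation of `&str`).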
    fn const_str(&self, s: &str) -> (Self::Value, Self::Value) {
        let len = s.len();
        let str_ty = self
            .layout_of(self.tcx.types.str_)
            .spirv_type(DUMMY_SP, self);
        (
            self.def_constant(
                self.type_ptr_to(str_ty),
                SpirvConst::PtrTo {
                    pointee: self
                        .constant_composite(
                            str_ty,
                            s.bytes().map(|b| self.const_u8(b).def_cx(self)),
                        )
                        .def_cx(self),
                },
            ),
            self.const_usize(len as u64),
        )
    }
    fn const_struct(&self, elts: &[Self::Value], _packed: bool) -> Self::Value {
        let field_types = elts.iter().map(|f| f.ty).collect::<Vec<_>>();
        let (field_offsets, size, align) = crate::abi::auto_struct_layout(self, &field_types);
        let struct_ty = SpirvType::Adt {
            def_id: None,
            size,
            align,
            field_types: &field_types,
            field_offsets: &field_offsets,
            field_names: None,
        }
        .def(DUMMY_SP, self);
        self.constant_composite(struct_ty, elts.iter().map(|f| f.def_cx(self)))
    }
    fn const_vector(&self, elts: &[Self::Value]) -> Self::Value {
        let vector_ty = SpirvType::simd_vector(
            self,
            DUMMY_SP,
            self.lookup_type(elts[0].ty),
            elts.len() as u32,
        )
        .def(DUMMY_SP, self);
        self.constant_composite(vector_ty, elts.iter().map(|elt| elt.def_cx(self)))
    }

    fn const_to_opt_uint(&self, v: Self::Value) -> Option<u64> {
        self.builder.lookup_const_scalar(v)?.try_into().ok()
    }
    fn const_to_opt_u128(&self, v: Self::Value, _sign_ext: bool) -> Option<u128> {
        self.builder.lookup_const_scalar(v)
    }

    fn scalar_to_backend(
        &self,
        scalar: Scalar,
        layout: abi::Scalar,
        ty: Self::Type,
    ) -> Self::Value {
        match scalar {
            Scalar::Int(int) => {
                assert_eq!(int.size(), layout.primitive().size(self));
                let data = int.to_uint(int.size());

                if let Primitive::Pointer(_) = layout.primitive() {
                    if data == 0 {
                        self.constant_null(ty)
                    } else {
                        let result = self.undef(ty);
                        self.zombie_no_span(
                            result.def_cx(self),
                            "pointer has non-null integer address",
                        );
                        result
                    }
                } else {
                    self.def_constant(ty, SpirvConst::Scalar(data))
                }
            }
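            // SPIR-V has no raw pointer addresses, so a pointer into a global
            // allocation is materialized by emitting the allocation's backing
            // data (or function/static/vtable) and taking its address.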
            Scalar::Ptr(ptr, _) => {
                let (prov, offset) = ptr.into_parts();
                let alloc_id = prov.alloc_id();
                let (base_addr, _base_addr_space) = match self.tcx.global_alloc(alloc_id) {
                    GlobalAlloc::Memory(alloc) => {
                        let pointee = match self.lookup_type(ty) {
                            SpirvType::Pointer { pointee } => pointee,
                            other => self.tcx.dcx().fatal(format!(
                                "GlobalAlloc::Memory type not implemented: {}",
                                other.debug(ty, self)
                            )),
                        };
                        let init = self
                            .try_read_from_const_alloc(alloc, pointee)
                            .unwrap_or_else(|| self.const_data_from_alloc(alloc));
                        let value = self.static_addr_of(init, alloc.inner().align, None);
                        (value, AddressSpace::DATA)
                    }
                    GlobalAlloc::Function { instance } => (
                        self.get_fn_addr(instance),
                        self.data_layout().instruction_address_space,
                    ),
                    GlobalAlloc::VTable(vty, dyn_ty) => {
                        let alloc = self
                            .tcx
                            .global_alloc(self.tcx.vtable_allocation((
                                vty,
                                dyn_ty.principal().map(|principal| {
                                    self.tcx.instantiate_bound_regions_with_erased(principal)
                                }),
                            )))
                            .unwrap_memory();
                        let pointee = match self.lookup_type(ty) {
                            SpirvType::Pointer { pointee } => pointee,
                            other => self.tcx.dcx().fatal(format!(
                                "GlobalAlloc::VTable type not implemented: {}",
                                other.debug(ty, self)
                            )),
                        };
                        let init = self
                            .try_read_from_const_alloc(alloc, pointee)
                            .unwrap_or_else(|| self.const_data_from_alloc(alloc));
                        let value = self.static_addr_of(init, alloc.inner().align, None);
                        (value, AddressSpace::DATA)
                    }
                    GlobalAlloc::Static(def_id) => {
                        assert!(self.tcx.is_static(def_id));
                        assert!(!self.tcx.is_thread_local_static(def_id));
                        (self.get_static(def_id), AddressSpace::DATA)
                    }
                };
                self.const_bitcast(self.const_ptr_byte_offset(base_addr, offset), ty)
            }
        }
    }

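    // Allocation data with no type information yet is wrapped in a
    // placeholder `SpirvConst::ConstDataFromAlloc` (of `Void` type), and only
    // decoded once a typed use is seen (see `const_bitcast` below).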
    fn const_data_from_alloc(&self, alloc: ConstAllocation<'_>) -> Self::Value {
        let alloc = self.tcx.lift(alloc).unwrap();

        let void_type = SpirvType::Void.def(DUMMY_SP, self);
        self.def_constant(void_type, SpirvConst::ConstDataFromAlloc(alloc))
    }

    fn const_ptr_byte_offset(&self, val: Self::Value, offset: Size) -> Self::Value {
        if offset == Size::ZERO {
            val
        } else {
            // Non-zero byte offsets aren't supported yet: keep the original
            // value, but zombie it (i.e. defer an error until actual use).
            let result = val;
            self.zombie_no_span(result.def_cx(self), "const_ptr_byte_offset");
            result
        }
    }
}

impl<'tcx> CodegenCx<'tcx> {
    pub fn const_bitcast(&self, val: SpirvValue, ty: Word) -> SpirvValue {
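        // Special case: a "bitcast" to a typed pointer is where a deferred
        // `ConstDataFromAlloc` (see `const_data_from_alloc` above) finally
        // gains a pointee type, allowing the allocation to be read as it.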
        if let SpirvValueKind::IllegalConst(_) = val.kind
            && let Some(SpirvConst::PtrTo { pointee }) = self.builder.lookup_const(val)
            && let Some(SpirvConst::ConstDataFromAlloc(alloc)) =
                self.builder.lookup_const_by_id(pointee)
            && let SpirvType::Pointer { pointee } = self.lookup_type(ty)
            && let Some(init) = self.try_read_from_const_alloc(alloc, pointee)
        {
            return self.static_addr_of(init, alloc.inner().align, None);
        }

        if val.ty == ty {
            val
        } else {
            let result = val.def_cx(self).with_type(ty);
            self.zombie_no_span(result.def_cx(self), "const_bitcast");
            result
        }
    }

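    // Builds an `abi::Scalar` with a full (wrapping) valid range, suitable
    // for feeding values read back out of constant allocations into
    // `scalar_to_backend`.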
    pub fn primitive_to_scalar(&self, value: Primitive) -> abi::Scalar {
        let bits = value.size(self.data_layout()).bits();
        assert!(bits <= 128);
        abi::Scalar::Initialized {
            value,
            valid_range: abi::WrappingRange {
                start: 0,
                end: (!0 >> (128 - bits)),
            },
        }
    }

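    /// Attempt to read the entire `alloc` as a single value of type `ty`,
    /// returning `None` if the read wouldn't consume the whole allocation.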
    pub fn try_read_from_const_alloc(
        &self,
        alloc: ConstAllocation<'tcx>,
        ty: Word,
    ) -> Option<SpirvValue> {
        let (result, read_size) = self.read_from_const_alloc_at(alloc, ty, Size::ZERO);
        (read_size == alloc.inner().size()).then_some(result)
    }

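    /// Read a value of type `ty` from `alloc` at `offset`, returning it
    /// together with the number of bytes read (which can differ from `ty`'s
    /// own size, e.g. for unsized types like `SpirvType::RuntimeArray`).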
    #[tracing::instrument(level = "trace", skip(self), fields(ty = ?self.debug_type(ty), offset))]
    fn read_from_const_alloc_at(
        &self,
        alloc: ConstAllocation<'tcx>,
        ty: Word,
        offset: Size,
    ) -> (SpirvValue, Size) {
        let ty_def = self.lookup_type(ty);
        match ty_def {
            SpirvType::Bool
            | SpirvType::Integer(..)
            | SpirvType::Float(_)
            | SpirvType::Pointer { .. } => {
                let size = ty_def.sizeof(self).unwrap();
                let primitive = match ty_def {
                    SpirvType::Bool => Primitive::Int(Integer::fit_unsigned(0), false),
                    SpirvType::Integer(int_size, int_signedness) => Primitive::Int(
                        match int_size {
                            8 => Integer::I8,
                            16 => Integer::I16,
                            32 => Integer::I32,
                            64 => Integer::I64,
                            128 => Integer::I128,
                            other => {
                                self.tcx
                                    .dcx()
                                    .fatal(format!("invalid size for integer: {other}"));
                            }
                        },
                        int_signedness,
                    ),
                    SpirvType::Float(float_size) => Primitive::Float(match float_size {
                        16 => Float::F16,
                        32 => Float::F32,
                        64 => Float::F64,
                        128 => Float::F128,
                        other => {
                            self.tcx
                                .dcx()
                                .fatal(format!("invalid size for float: {other}"));
                        }
                    }),
                    SpirvType::Pointer { .. } => Primitive::Pointer(AddressSpace::DATA),
                    _ => unreachable!(),
                };

                let range = alloc_range(offset, size);
                let read_provenance = matches!(primitive, Primitive::Pointer(_));

                let mut primitive = primitive;
                let mut read_result = alloc.inner().read_scalar(self, range, read_provenance);

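                // A failed integer read may mean the bytes actually hold a
                // relocated pointer: retry with provenance enabled, and
                // adjust the primitive to a pointer in that case.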
                if read_result.is_err()
                    && !read_provenance
                    && let read_ptr_result @ Ok(Scalar::Ptr(ptr, _)) =
                        alloc.inner().read_scalar(self, range, true)
                {
                    let (prov, _offset) = ptr.into_parts();
                    primitive = Primitive::Pointer(
                        self.tcx.global_alloc(prov.alloc_id()).address_space(self),
                    );
                    read_result = read_ptr_result;
                }

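                // A successful read becomes a backend constant; each failure
                // mode below is mapped to a human-readable reason, used to
                // "zombie" (defer an error on) the resulting value.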
                let scalar_or_zombie = match read_result {
                    Ok(scalar) => {
                        Ok(self.scalar_to_backend(scalar, self.primitive_to_scalar(primitive), ty))
                    }

                    Err(err) => match err {
                        AllocError::InvalidUninitBytes(_) => {
                            // If *all* requested bytes are uninitialized, the
                            // value is a legitimate undef; a partial overlap
                            // with uninitialized bytes stays an error.
                            let uninit_range = alloc
                                .inner()
                                .init_mask()
                                .is_range_initialized(range)
                                .unwrap_err();
                            let uninit_size = {
                                let [start, end] = [uninit_range.start, uninit_range.end()]
                                    .map(|x| x.clamp(range.start, range.end()));
                                end - start
                            };
                            if uninit_size == size {
                                Ok(self.undef(ty))
                            } else {
                                Err(format!(
                                    "overlaps {} uninitialized bytes",
                                    uninit_size.bytes()
                                ))
                            }
                        }
                        AllocError::ReadPointerAsInt(_) => Err("overlaps pointer bytes".into()),
                        AllocError::ReadPartialPointer(_) => {
                            Err("partially overlaps another pointer".into())
                        }

                        // Errors that shouldn't be reachable from a plain
                        // read of a constant allocation.
                        AllocError::ScalarSizeMismatch(_)
                        | AllocError::OverwritePartialPointer(_) => {
                            Err(format!("unrecognized `AllocError::{err:?}`"))
                        }
                    },
                };
                let result = scalar_or_zombie.unwrap_or_else(|reason| {
                    let result = self.undef(ty);
                    self.zombie_no_span(
                        result.def_cx(self),
                        &format!("unsupported `{}` constant: {reason}", self.debug_type(ty)),
                    );
                    result
                });
                (result, size)
            }
            SpirvType::Adt {
                field_types,
                field_offsets,
                ..
            } => {
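                // Read each field at its own offset, tracking how far the
                // furthest field read reached, so undersized constants can be
                // detected (and zombified) below.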
                let mut tail_read_range = ..Size::ZERO;
                let result = self.constant_composite(
                    ty,
                    field_types
                        .iter()
                        .zip_eq(field_offsets.iter())
                        .map(|(&f_ty, &f_offset)| {
                            let (f, f_size) =
                                self.read_from_const_alloc_at(alloc, f_ty, offset + f_offset);
                            tail_read_range.end =
                                tail_read_range.end.max(offset + f_offset + f_size);
                            f.def_cx(self)
                        }),
                );

                let ty_size = ty_def.sizeof(self);

                if let Some(ty_size) = ty_size
                    && let Some(tail_gap) = (ty_size.bytes())
                        .checked_sub(tail_read_range.end.align_to(ty_def.alignof(self)).bytes())
                    && tail_gap > 0
                {
                    self.zombie_no_span(
                        result.def_cx(self),
                        &format!(
                            "undersized `{}` constant (at least {tail_gap} bytes may be missing)",
                            self.debug_type(ty)
                        ),
                    );
                }

                (result, ty_size.unwrap_or(tail_read_range.end))
            }
            SpirvType::Vector { element, .. }
            | SpirvType::Matrix { element, .. }
            | SpirvType::Array { element, .. }
            | SpirvType::RuntimeArray { element } => {
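                // All four are homogeneous aggregates: elements are read at a
                // fixed stride, and only the element count differs (runtime
                // arrays derive theirs from the allocation's remaining size).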
                let stride = self.lookup_type(element).sizeof(self).unwrap();

                let count = match ty_def {
                    SpirvType::Vector { count, .. } | SpirvType::Matrix { count, .. } => {
                        u64::from(count)
                    }
                    SpirvType::Array { count, .. } => {
                        u64::try_from(self.builder.lookup_const_scalar(count).unwrap()).unwrap()
                    }
                    SpirvType::RuntimeArray { .. } => {
                        (alloc.inner().size() - offset).bytes() / stride.bytes()
                    }
                    _ => unreachable!(),
                };

                let result = self.constant_composite(
                    ty,
                    (0..count).map(|i| {
                        let (e, e_size) =
                            self.read_from_const_alloc_at(alloc, element, offset + i * stride);
                        assert_eq!(e_size, stride);
                        e.def_cx(self)
                    }),
                );

                let read_size = (count * stride).align_to(ty_def.alignof(self));

                if let Some(ty_size) = ty_def.sizeof(self) {
                    assert_eq!(read_size, ty_size);
                }

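                // Unsized (runtime array) constants aren't supported, so the
                // composite built above is only kept as a zombie (an error
                // deferred until the value is actually used).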
                if let SpirvType::RuntimeArray { .. } = ty_def {
                    self.zombie_no_span(
                        result.def_cx(self),
                        &format!("unsupported unsized `{}` constant", self.debug_type(ty)),
                    );
                }

                (result, read_size)
            }

            SpirvType::Void
            | SpirvType::Function { .. }
            | SpirvType::Image { .. }
            | SpirvType::Sampler
            | SpirvType::SampledImage { .. }
            | SpirvType::InterfaceBlock { .. }
            | SpirvType::AccelerationStructureKhr
            | SpirvType::RayQueryKhr => {
                let result = self.undef(ty);
                self.zombie_no_span(
                    result.def_cx(self),
                    &format!(
                        "cannot reinterpret Rust constant data as a `{}` value",
                        self.debug_type(ty)
                    ),
                );
                (result, ty_def.sizeof(self).unwrap_or(Size::ZERO))
            }
        }
    }
}