From b403232626bf43210777f2f6cf4734e8a0e8c74e Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Wed, 12 Mar 2025 22:48:32 +0000 Subject: [PATCH 01/10] Collect constants within `global_asm!` in collector This is currently a no-op, but will be useful when const in `global_asm!` can be pointers. --- compiler/rustc_monomorphize/src/collector.rs | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index 3aa55cc8eb9fb..88b1e6434dbb9 100644 --- a/compiler/rustc_monomorphize/src/collector.rs +++ b/compiler/rustc_monomorphize/src/collector.rs @@ -509,10 +509,18 @@ fn collect_items_rec<'tcx>( if let hir::ItemKind::GlobalAsm { asm, .. } = item.kind { for (op, op_sp) in asm.operands { match *op { - hir::InlineAsmOperand::Const { .. } => { - // Only constants which resolve to a plain integer - // are supported. Therefore the value should not - // depend on any other items. + hir::InlineAsmOperand::Const { anon_const } => { + match tcx.const_eval_poly(anon_const.def_id.to_def_id()) { + Ok(val) => { + collect_const_value(tcx, val, &mut used_items); + } + Err(ErrorHandled::TooGeneric(..)) => { + span_bug!(*op_sp, "asm const cannot be resolved; too generic") + } + Err(ErrorHandled::Reported(..)) => { + continue; + } + } } hir::InlineAsmOperand::SymFn { expr } => { let fn_ty = tcx.typeck(item_id.owner_id).expr_ty(expr); From 445bc8bb42d3813678df5facb3a329222388a5a0 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Fri, 5 Dec 2025 19:31:28 +0000 Subject: [PATCH 02/10] Add FnDef/Closure -> FnPtr coercion for inline asm const operand --- compiler/rustc_hir_typeck/src/expr.rs | 35 ++++++++++++++++++++++++++- 1 file changed, 34 insertions(+), 1 deletion(-) diff --git a/compiler/rustc_hir_typeck/src/expr.rs b/compiler/rustc_hir_typeck/src/expr.rs index b20579231cdd3..d3fa4dc2db3f7 100644 --- a/compiler/rustc_hir_typeck/src/expr.rs +++ b/compiler/rustc_hir_typeck/src/expr.rs 
@@ -3731,7 +3731,40 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } hir::InlineAsmOperand::Const { ref anon_const } => { - self.check_expr_const_block(anon_const, Expectation::NoExpectation); + // This is mostly similar to type-checking of inline const expressions `const { ... }`, however + // asm const has special coercion rules (per RFC 3848) where function items and closures are coerced to + // function pointers (while pointers and integer remain as-is). + let body = self.tcx.hir_body(anon_const.body); + + let fcx = FnCtxt::new(self, self.param_env, anon_const.def_id); + let ty = fcx.check_expr(body.value); + let target_ty = match self.structurally_resolve_type(body.value.span, ty).kind() + { + ty::FnDef(..) => { + let fn_sig = ty.fn_sig(self.tcx()); + Ty::new_fn_ptr(self.tcx(), fn_sig) + } + ty::Closure(_, args) => { + let closure_sig = args.as_closure().sig(); + let fn_sig = + self.tcx().signature_unclosure(closure_sig, hir::Safety::Safe); + Ty::new_fn_ptr(self.tcx(), fn_sig) + } + _ => ty, + }; + + if let Err(diag) = + self.demand_coerce_diag(&body.value, ty, target_ty, None, AllowTwoPhase::No) + { + diag.emit(); + } + + fcx.require_type_is_sized( + target_ty, + body.value.span, + ObligationCauseCode::SizedConstOrStatic, + ); + fcx.write_ty(anon_const.hir_id, target_ty); } hir::InlineAsmOperand::SymFn { expr } => { self.check_expr(expr); From 95b2eef0977dda2087c7aaa4d6edb0a3a5e9d33f Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Mon, 15 Dec 2025 19:36:35 +0000 Subject: [PATCH 03/10] Give global_asm symbol names Currently global_asm already have symbol names when using v0 scheme, this makes them obtain symbols with legacy scheme too. 
--- compiler/rustc_middle/src/mir/mono.rs | 2 +- compiler/rustc_symbol_mangling/src/legacy.rs | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/compiler/rustc_middle/src/mir/mono.rs b/compiler/rustc_middle/src/mir/mono.rs index acebf91b1cbf5..8faaac6f866b0 100644 --- a/compiler/rustc_middle/src/mir/mono.rs +++ b/compiler/rustc_middle/src/mir/mono.rs @@ -123,7 +123,7 @@ impl<'tcx> MonoItem<'tcx> { MonoItem::Fn(instance) => tcx.symbol_name(instance), MonoItem::Static(def_id) => tcx.symbol_name(Instance::mono(tcx, def_id)), MonoItem::GlobalAsm(item_id) => { - SymbolName::new(tcx, &format!("global_asm_{:?}", item_id.owner_id)) + tcx.symbol_name(Instance::mono(tcx, item_id.owner_id.to_def_id())) } } } diff --git a/compiler/rustc_symbol_mangling/src/legacy.rs b/compiler/rustc_symbol_mangling/src/legacy.rs index ea16231880e2c..4ba347187cf71 100644 --- a/compiler/rustc_symbol_mangling/src/legacy.rs +++ b/compiler/rustc_symbol_mangling/src/legacy.rs @@ -36,6 +36,11 @@ pub(super) fn mangle<'tcx>( debug!(?instance_ty); break; } + DefPathData::GlobalAsm => { + // `global_asm!` doesn't have a type. 
+ instance_ty = tcx.types.unit; + break; + } _ => { // if we're making a symbol for something, there ought // to be a value or type-def or something in there From d3cc9b206d251c46a3179afd8ce80c097ce6d535 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Thu, 18 Dec 2025 17:41:19 +0000 Subject: [PATCH 04/10] Delay stringification of const to backend --- .../rustc_codegen_cranelift/src/global_asm.rs | 10 ++++-- .../rustc_codegen_cranelift/src/inline_asm.rs | 10 +++++- compiler/rustc_codegen_gcc/src/asm.rs | 31 ++++++++++++++----- compiler/rustc_codegen_llvm/src/asm.rs | 22 ++++++++++--- compiler/rustc_codegen_ssa/src/base.rs | 29 +++++++++-------- compiler/rustc_codegen_ssa/src/common.rs | 8 ++--- compiler/rustc_codegen_ssa/src/mir/block.rs | 15 ++++----- .../rustc_codegen_ssa/src/mir/naked_asm.rs | 21 ++++++------- compiler/rustc_codegen_ssa/src/traits/asm.rs | 21 ++++++++++--- 9 files changed, 110 insertions(+), 57 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/src/global_asm.rs b/compiler/rustc_codegen_cranelift/src/global_asm.rs index 1daf428acf766..ecae574eafdea 100644 --- a/compiler/rustc_codegen_cranelift/src/global_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/global_asm.rs @@ -108,8 +108,14 @@ fn codegen_global_asm_inner<'tcx>( InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span } => { use rustc_codegen_ssa::back::symbol_export::escape_symbol_name; match operands[operand_idx] { - GlobalAsmOperandRef::Const { ref string } => { - global_asm.push_str(string); + GlobalAsmOperandRef::Const { value, ty } => { + let string = rustc_codegen_ssa::common::asm_const_to_str( + tcx, + span, + value, + FullyMonomorphizedLayoutCx(tcx).layout_of(ty), + ); + global_asm.push_str(&string); } GlobalAsmOperandRef::SymFn { instance } => { if cfg!(not(feature = "inline_asm_sym")) { diff --git a/compiler/rustc_codegen_cranelift/src/inline_asm.rs b/compiler/rustc_codegen_cranelift/src/inline_asm.rs index ac0da06cbb8e3..99ab1281c096a 100644 --- 
a/compiler/rustc_codegen_cranelift/src/inline_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/inline_asm.rs @@ -94,10 +94,18 @@ pub(crate) fn codegen_inline_asm_terminator<'tcx>( } InlineAsmOperand::Const { ref value } => { let (const_value, ty) = crate::constant::eval_mir_constant(fx, value); + let mir::ConstValue::Scalar(scalar) = const_value else { + span_bug!( + span, + "expected Scalar for promoted asm const, but got {:#?}", + const_value + ) + }; + let value = rustc_codegen_ssa::common::asm_const_to_str( fx.tcx, span, - const_value, + scalar, fx.layout_of(ty), ); CInlineAsmOperand::Const { value } diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs index 319f3d3278730..69f8e35e7a515 100644 --- a/compiler/rustc_codegen_gcc/src/asm.rs +++ b/compiler/rustc_codegen_gcc/src/asm.rs @@ -12,6 +12,7 @@ use rustc_codegen_ssa::traits::{ }; use rustc_middle::bug; use rustc_middle::ty::Instance; +use rustc_middle::ty::layout::LayoutOf; use rustc_span::Span; use rustc_target::asm::*; @@ -303,8 +304,9 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { } } - InlineAsmOperandRef::Const { ref string } => { - constants_len += string.len() + att_dialect as usize; + InlineAsmOperandRef::Const { .. } => { + // We don't know the size at this point, just some estimate. 
+ constants_len += 20; } InlineAsmOperandRef::SymFn { instance } => { @@ -453,7 +455,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { template_str.push_str(escaped_char); } } - InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span: _ } => { + InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span } => { let mut push_to_template = |modifier, gcc_idx| { use std::fmt::Write; @@ -511,8 +513,15 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { template_str.push_str(name); } - InlineAsmOperandRef::Const { ref string } => { - template_str.push_str(string); + InlineAsmOperandRef::Const { value, ty } => { + // Const operands get injected directly into the template + let string = rustc_codegen_ssa::common::asm_const_to_str( + self.tcx, + span, + value, + self.layout_of(ty), + ); + template_str.push_str(&string); } InlineAsmOperandRef::Label { label } => { @@ -889,13 +898,19 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { .unwrap_or(string.len()); } } - InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => { + InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span } => { match operands[operand_idx] { - GlobalAsmOperandRef::Const { ref string } => { + GlobalAsmOperandRef::Const { value, ty } => { // Const operands get injected directly into the // template. Note that we don't need to escape % // here unlike normal inline assembly. 
- template_str.push_str(string); + let string = rustc_codegen_ssa::common::asm_const_to_str( + self.tcx, + span, + value, + self.layout_of(ty), + ); + template_str.push_str(&string); } GlobalAsmOperandRef::SymFn { instance } => { diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index 80d77be1cc384..698f0c21d95fd 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -188,7 +188,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { template_str.push_str(s) } } - InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span: _ } => { + InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span } => { match operands[operand_idx] { InlineAsmOperandRef::In { reg, .. } | InlineAsmOperandRef::Out { reg, .. } @@ -203,9 +203,15 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { template_str.push_str(&format!("${{{}}}", op_idx[&operand_idx])); } } - InlineAsmOperandRef::Const { ref string } => { + InlineAsmOperandRef::Const { value, ty } => { // Const operands get injected directly into the template - template_str.push_str(string); + let string = rustc_codegen_ssa::common::asm_const_to_str( + self.tcx, + span, + value, + self.layout_of(ty), + ); + template_str.push_str(&string); } InlineAsmOperandRef::SymFn { .. } | InlineAsmOperandRef::SymStatic { .. } => { @@ -402,11 +408,17 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> { InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span } => { use rustc_codegen_ssa::back::symbol_export::escape_symbol_name; match operands[operand_idx] { - GlobalAsmOperandRef::Const { ref string } => { + GlobalAsmOperandRef::Const { value, ty } => { // Const operands get injected directly into the // template. Note that we don't need to escape $ // here unlike normal inline assembly. 
- template_str.push_str(string); + let string = rustc_codegen_ssa::common::asm_const_to_str( + self.tcx, + span, + value, + self.layout_of(ty), + ); + template_str.push_str(&string); } GlobalAsmOperandRef::SymFn { instance } => { let llval = self.get_fn(instance); diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs index 609f54b7a1cf4..2ff554a3b3637 100644 --- a/compiler/rustc_codegen_ssa/src/base.rs +++ b/compiler/rustc_codegen_ssa/src/base.rs @@ -22,12 +22,12 @@ use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile; use rustc_middle::middle::dependency_format::Dependencies; use rustc_middle::middle::exported_symbols::{self, SymbolExportKind}; use rustc_middle::middle::lang_items; -use rustc_middle::mir::BinOp; -use rustc_middle::mir::interpret::ErrorHandled; +use rustc_middle::mir::interpret::{ErrorHandled, Scalar}; use rustc_middle::mir::mono::{CodegenUnit, CodegenUnitNameBuilder, MonoItem, MonoItemPartitions}; +use rustc_middle::mir::{BinOp, ConstValue}; use rustc_middle::query::Providers; use rustc_middle::ty::layout::{HasTyCtxt, HasTypingEnv, LayoutOf, TyAndLayout}; -use rustc_middle::ty::{self, Instance, Ty, TyCtxt}; +use rustc_middle::ty::{self, Instance, Ty, TyCtxt, UintTy}; use rustc_middle::{bug, span_bug}; use rustc_session::Session; use rustc_session::config::{self, CrateType, EntryFnType}; @@ -410,20 +410,23 @@ where Ok(const_value) => { let ty = cx.tcx().typeck_body(anon_const.body).node_type(anon_const.hir_id); - let string = common::asm_const_to_str( - cx.tcx(), - *op_sp, - const_value, - cx.layout_of(ty), - ); - GlobalAsmOperandRef::Const { string } + let ConstValue::Scalar(scalar) = const_value else { + span_bug!( + *op_sp, + "expected Scalar for promoted asm const, but got {:#?}", + const_value + ) + }; + GlobalAsmOperandRef::Const { value: scalar, ty } } Err(ErrorHandled::Reported { .. 
}) => { // An error has already been reported and // compilation is guaranteed to fail if execution - // hits this path. So an empty string instead of - // a stringified constant value will suffice. - GlobalAsmOperandRef::Const { string: String::new() } + // hits this path. So anything will suffice. + GlobalAsmOperandRef::Const { + value: Scalar::from_u32(0), + ty: Ty::new_uint(cx.tcx(), UintTy::U32), + } } Err(ErrorHandled::TooGeneric(_)) => { span_bug!(*op_sp, "asm const cannot be resolved; too generic") diff --git a/compiler/rustc_codegen_ssa/src/common.rs b/compiler/rustc_codegen_ssa/src/common.rs index 89a3f8061a8c1..80b648413a5db 100644 --- a/compiler/rustc_codegen_ssa/src/common.rs +++ b/compiler/rustc_codegen_ssa/src/common.rs @@ -2,9 +2,10 @@ use rustc_hir::LangItem; use rustc_hir::attrs::PeImportNameType; +use rustc_middle::mir::interpret::Scalar; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::ty::{self, Instance, TyCtxt}; -use rustc_middle::{bug, mir, span_bug}; +use rustc_middle::{bug, span_bug}; use rustc_session::cstore::{DllCallingConvention, DllImport}; use rustc_span::Span; use rustc_target::spec::{Abi, Env, Os, Target}; @@ -149,12 +150,9 @@ pub(crate) fn shift_mask_val<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( pub fn asm_const_to_str<'tcx>( tcx: TyCtxt<'tcx>, sp: Span, - const_value: mir::ConstValue, + scalar: Scalar, ty_and_layout: TyAndLayout<'tcx>, ) -> String { - let mir::ConstValue::Scalar(scalar) = const_value else { - span_bug!(sp, "expected Scalar for promoted asm const, but got {:#?}", const_value) - }; let value = scalar.assert_scalar_int().to_bits(ty_and_layout.size); match ty_and_layout.ty.kind() { ty::Uint(_) => value.to_string(), diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index cf643931717be..ce4f0ad5c569d 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -1395,13 +1395,14 @@ impl<'a, 'tcx, Bx: 
BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } mir::InlineAsmOperand::Const { ref value } => { let const_value = self.eval_mir_constant(value); - let string = common::asm_const_to_str( - bx.tcx(), - span, - const_value, - bx.layout_of(value.ty()), - ); - InlineAsmOperandRef::Const { string } + let mir::ConstValue::Scalar(scalar) = const_value else { + span_bug!( + span, + "expected Scalar for promoted asm const, but got {:#?}", + const_value + ) + }; + InlineAsmOperandRef::Const { value: scalar, ty: value.ty() } } mir::InlineAsmOperand::SymFn { ref value } => { let const_ = self.monomorphize(value.const_); diff --git a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs index f12410c65d986..0836922f61f54 100644 --- a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs +++ b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs @@ -2,15 +2,14 @@ use rustc_abi::{BackendRepr, Float, Integer, Primitive, RegKind}; use rustc_hir::attrs::{InstructionSetAttr, Linkage}; use rustc_hir::def_id::LOCAL_CRATE; use rustc_middle::mir::mono::{MonoItemData, Visibility}; -use rustc_middle::mir::{InlineAsmOperand, START_BLOCK}; +use rustc_middle::mir::{self, InlineAsmOperand, START_BLOCK}; use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout}; use rustc_middle::ty::{Instance, Ty, TyCtxt, TypeVisitableExt}; -use rustc_middle::{bug, ty}; +use rustc_middle::{bug, span_bug, ty}; use rustc_span::sym; use rustc_target::callconv::{ArgAbi, FnAbi, PassMode}; use rustc_target::spec::{Arch, BinaryFormat}; -use crate::common; use crate::mir::AsmCodegenMethods; use crate::traits::GlobalAsmOperandRef; @@ -77,15 +76,15 @@ fn inline_to_global_operand<'a, 'tcx, Cx: LayoutOf<'tcx, LayoutOfResult = TyAndL cx.typing_env(), ty::EarlyBinder::bind(value.ty()), ); + let mir::ConstValue::Scalar(scalar) = const_value else { + span_bug!( + value.span, + "expected Scalar for promoted asm const, but got {:#?}", + const_value + ) + }; - let string = 
common::asm_const_to_str( - cx.tcx(), - value.span, - const_value, - cx.layout_of(mono_type), - ); - - GlobalAsmOperandRef::Const { string } + GlobalAsmOperandRef::Const { value: scalar, ty: mono_type } } InlineAsmOperand::SymFn { value } => { let mono_type = instance.instantiate_mir_and_normalize_erasing_regions( diff --git a/compiler/rustc_codegen_ssa/src/traits/asm.rs b/compiler/rustc_codegen_ssa/src/traits/asm.rs index cc7a6a3f19e9e..f7bab1e07c3a9 100644 --- a/compiler/rustc_codegen_ssa/src/traits/asm.rs +++ b/compiler/rustc_codegen_ssa/src/traits/asm.rs @@ -1,6 +1,7 @@ use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_hir::def_id::DefId; -use rustc_middle::ty::Instance; +use rustc_middle::mir::interpret::Scalar; +use rustc_middle::ty::{Instance, Ty}; use rustc_span::Span; use rustc_target::asm::InlineAsmRegOrRegClass; @@ -26,7 +27,9 @@ pub enum InlineAsmOperandRef<'tcx, B: BackendTypes + ?Sized> { out_place: Option>, }, Const { - string: String, + value: Scalar, + /// Type of the constant. This is needed to extract width and signedness. + ty: Ty<'tcx>, }, SymFn { instance: Instance<'tcx>, @@ -41,9 +44,17 @@ pub enum InlineAsmOperandRef<'tcx, B: BackendTypes + ?Sized> { #[derive(Debug)] pub enum GlobalAsmOperandRef<'tcx> { - Const { string: String }, - SymFn { instance: Instance<'tcx> }, - SymStatic { def_id: DefId }, + Const { + value: Scalar, + /// Type of the constant. This is needed to extract width and signedness. + ty: Ty<'tcx>, + }, + SymFn { + instance: Instance<'tcx>, + }, + SymStatic { + def_id: DefId, + }, } pub trait AsmBuilderMethods<'tcx>: BackendTypes { From 1ab078c36b52c671ad0229a8f1e8b1d61e0eca6f Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Thu, 18 Dec 2025 18:31:04 +0000 Subject: [PATCH 05/10] Unify handling of asm const and sym fn using CTFE This gives the asm-const code the basic ability to deal with pointers and provenance, which lays the groundwork for asm_const_ptr.
Note that `SymStatic` is not removed, as it supports `#[thread_local]` statics where CTFE does not. --- .../rustc_codegen_cranelift/src/global_asm.rs | 63 ++++--- .../rustc_codegen_cranelift/src/inline_asm.rs | 2 +- compiler/rustc_codegen_gcc/src/asm.rs | 161 +++++++++++------- compiler/rustc_codegen_llvm/src/asm.rs | 128 ++++++++++---- compiler/rustc_codegen_ssa/src/base.rs | 10 +- compiler/rustc_codegen_ssa/src/common.rs | 7 +- compiler/rustc_codegen_ssa/src/mir/block.rs | 10 +- .../rustc_codegen_ssa/src/mir/naked_asm.rs | 9 +- compiler/rustc_codegen_ssa/src/traits/asm.rs | 6 - 9 files changed, 268 insertions(+), 128 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/src/global_asm.rs b/compiler/rustc_codegen_cranelift/src/global_asm.rs index ecae574eafdea..306b77246a3f7 100644 --- a/compiler/rustc_codegen_cranelift/src/global_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/global_asm.rs @@ -8,6 +8,7 @@ use std::sync::Arc; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_codegen_ssa::traits::{AsmCodegenMethods, GlobalAsmOperandRef}; +use rustc_middle::mir::interpret::{GlobalAlloc, Scalar as ConstScalar}; use rustc_middle::ty::TyCtxt; use rustc_middle::ty::layout::{ FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTyCtxt, HasTypingEnv, LayoutError, LayoutOfHelpers, @@ -109,26 +110,49 @@ fn codegen_global_asm_inner<'tcx>( use rustc_codegen_ssa::back::symbol_export::escape_symbol_name; match operands[operand_idx] { GlobalAsmOperandRef::Const { value, ty } => { - let string = rustc_codegen_ssa::common::asm_const_to_str( - tcx, - span, - value, - FullyMonomorphizedLayoutCx(tcx).layout_of(ty), - ); - global_asm.push_str(&string); - } - GlobalAsmOperandRef::SymFn { instance } => { - if cfg!(not(feature = "inline_asm_sym")) { - tcx.dcx().span_err( - span, - "asm! and global_asm! 
sym operands are not yet supported", - ); - } + match value { + ConstScalar::Int(int) => { + let string = rustc_codegen_ssa::common::asm_const_to_str( + tcx, + span, + int, + FullyMonomorphizedLayoutCx(tcx).layout_of(ty), + ); + global_asm.push_str(&string); + } - let symbol = tcx.symbol_name(instance); - // FIXME handle the case where the function was made private to the - // current codegen unit - global_asm.push_str(&escape_symbol_name(tcx, symbol.name, span)); + ConstScalar::Ptr(ptr, _) => { + let (prov, offset) = ptr.prov_and_relative_offset(); + assert_eq!(offset.bytes(), 0); + let global_alloc = tcx.global_alloc(prov.alloc_id()); + let symbol_name = match global_alloc { + GlobalAlloc::Function { instance } => { + if cfg!(not(feature = "inline_asm_sym")) { + tcx.dcx().span_err( + span, + "asm! and global_asm! sym operands are not yet supported", + ); + } + + // FIXME handle the case where the function was made private to the + // current codegen unit + tcx.symbol_name(instance) + } + GlobalAlloc::Static(def_id) => { + let instance = Instance::mono(tcx, def_id); + tcx.symbol_name(instance) + } + GlobalAlloc::Memory(_) + | GlobalAlloc::VTable(..) + | GlobalAlloc::TypeId { .. } => unreachable!(), + }; + global_asm.push_str(&escape_symbol_name( + tcx, + symbol_name.name, + span, + )); + } + } } GlobalAsmOperandRef::SymStatic { def_id } => { if cfg!(not(feature = "inline_asm_sym")) { @@ -137,7 +161,6 @@ fn codegen_global_asm_inner<'tcx>( "asm! and global_asm! 
sym operands are not yet supported", ); } - let instance = Instance::mono(tcx, def_id); let symbol = tcx.symbol_name(instance); global_asm.push_str(&escape_symbol_name(tcx, symbol.name, span)); diff --git a/compiler/rustc_codegen_cranelift/src/inline_asm.rs b/compiler/rustc_codegen_cranelift/src/inline_asm.rs index 99ab1281c096a..6011912db089a 100644 --- a/compiler/rustc_codegen_cranelift/src/inline_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/inline_asm.rs @@ -105,7 +105,7 @@ pub(crate) fn codegen_inline_asm_terminator<'tcx>( let value = rustc_codegen_ssa::common::asm_const_to_str( fx.tcx, span, - scalar, + scalar.assert_scalar_int(), fx.layout_of(ty), ); CInlineAsmOperand::Const { value } diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs index 69f8e35e7a515..d2c2955f041b6 100644 --- a/compiler/rustc_codegen_gcc/src/asm.rs +++ b/compiler/rustc_codegen_gcc/src/asm.rs @@ -11,6 +11,7 @@ use rustc_codegen_ssa::traits::{ GlobalAsmOperandRef, InlineAsmOperandRef, }; use rustc_middle::bug; +use rustc_middle::mir::interpret::{GlobalAlloc, Scalar}; use rustc_middle::ty::Instance; use rustc_middle::ty::layout::LayoutOf; use rustc_span::Span; @@ -309,12 +310,6 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { constants_len += 20; } - InlineAsmOperandRef::SymFn { instance } => { - // TODO(@Amanieu): Additional mangling is needed on - // some targets to add a leading underscore (Mach-O) - // or byte count suffixes (x86 Windows). - constants_len += self.tcx.symbol_name(instance).name.len(); - } InlineAsmOperandRef::SymStatic { def_id } => { // TODO(@Amanieu): Additional mangling is needed on // some targets to add a leading underscore (Mach-O). 
@@ -404,24 +399,32 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { // processed in the previous pass } - InlineAsmOperandRef::SymFn { instance } => { - inputs.push(AsmInOperand { - constraint: "X".into(), - rust_idx, - val: get_fn(self.cx, instance).get_address(None), - }); - } + InlineAsmOperandRef::Const { value, ty: _ } => match value { + Scalar::Int(_) => (), + Scalar::Ptr(ptr, _) => { + let (prov, offset) = ptr.prov_and_relative_offset(); + assert_eq!(offset.bytes(), 0); + let global_alloc = self.tcx.global_alloc(prov.alloc_id()); + let val = match global_alloc { + GlobalAlloc::Function { instance } => { + get_fn(self.cx, instance).get_address(None) + } + GlobalAlloc::Static(def_id) => { + self.cx.get_static(def_id).get_address(None) + } + GlobalAlloc::Memory(_) + | GlobalAlloc::VTable(..) + | GlobalAlloc::TypeId { .. } => unreachable!(), + }; + inputs.push(AsmInOperand { constraint: "X".into(), rust_idx, val }); + } + }, InlineAsmOperandRef::SymStatic { def_id } => { - inputs.push(AsmInOperand { - constraint: "X".into(), - rust_idx, - val: self.cx.get_static(def_id).get_address(None), - }); - } - - InlineAsmOperandRef::Const { .. } => { - // processed in the previous pass + // TODO(@Amanieu): Additional mangling is needed on + // some targets to add a leading underscore (MachO). + constants_len += + self.tcx.symbol_name(Instance::mono(self.tcx, def_id)).name.len(); } InlineAsmOperandRef::Label { .. } => { @@ -497,12 +500,43 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { push_to_template(modifier, gcc_index); } - InlineAsmOperandRef::SymFn { instance } => { - // TODO(@Amanieu): Additional mangling is needed on - // some targets to add a leading underscore (Mach-O) - // or byte count suffixes (x86 Windows). 
- let name = self.tcx.symbol_name(instance).name; - template_str.push_str(name); + InlineAsmOperandRef::Const { value, ty } => { + match value { + Scalar::Int(int) => { + // Const operands get injected directly into the template + let string = rustc_codegen_ssa::common::asm_const_to_str( + self.tcx, + span, + int, + self.layout_of(ty), + ); + template_str.push_str(&string); + } + + Scalar::Ptr(ptr, _) => { + let (prov, offset) = ptr.prov_and_relative_offset(); + assert_eq!(offset.bytes(), 0); + let global_alloc = self.tcx.global_alloc(prov.alloc_id()); + let symbol_name = match global_alloc { + GlobalAlloc::Function { instance } => { + // TODO(@Amanieu): Additional mangling is needed on + // some targets to add a leading underscore (Mach-O) + // or byte count suffixes (x86 Windows). + self.tcx.symbol_name(instance) + } + GlobalAlloc::Static(def_id) => { + // TODO(@Amanieu): Additional mangling is needed on + // some targets to add a leading underscore (Mach-O). + let instance = Instance::mono(self.tcx, def_id); + self.tcx.symbol_name(instance) + } + GlobalAlloc::Memory(_) + | GlobalAlloc::VTable(..) + | GlobalAlloc::TypeId { .. 
} => unreachable!(), + }; + template_str.push_str(symbol_name.name); + } + } } InlineAsmOperandRef::SymStatic { def_id } => { @@ -513,17 +547,6 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { template_str.push_str(name); } - InlineAsmOperandRef::Const { value, ty } => { - // Const operands get injected directly into the template - let string = rustc_codegen_ssa::common::asm_const_to_str( - self.tcx, - span, - value, - self.layout_of(ty), - ); - template_str.push_str(&string); - } - InlineAsmOperandRef::Label { label } => { let label_gcc_index = labels.iter().position(|&l| l == label).expect("wrong rust index"); @@ -901,28 +924,48 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span } => { match operands[operand_idx] { GlobalAsmOperandRef::Const { value, ty } => { - // Const operands get injected directly into the - // template. Note that we don't need to escape % - // here unlike normal inline assembly. - let string = rustc_codegen_ssa::common::asm_const_to_str( - self.tcx, - span, - value, - self.layout_of(ty), - ); - template_str.push_str(&string); - } + match value { + Scalar::Int(int) => { + // Const operands get injected directly into the + // template. Note that we don't need to escape % + // here unlike normal inline assembly. + let string = rustc_codegen_ssa::common::asm_const_to_str( + self.tcx, + span, + int, + self.layout_of(ty), + ); + template_str.push_str(&string); + } - GlobalAsmOperandRef::SymFn { instance } => { - let function = get_fn(self, instance); - self.add_used_function(function); - // TODO(@Amanieu): Additional mangling is needed on - // some targets to add a leading underscore (Mach-O) - // or byte count suffixes (x86 Windows). 
- let name = self.tcx.symbol_name(instance).name; - template_str.push_str(name); + Scalar::Ptr(ptr, _) => { + let (prov, offset) = ptr.prov_and_relative_offset(); + assert_eq!(offset.bytes(), 0); + let global_alloc = self.tcx.global_alloc(prov.alloc_id()); + let symbol_name = match global_alloc { + GlobalAlloc::Function { instance } => { + let function = get_fn(self, instance); + self.add_used_function(function); + // TODO(@Amanieu): Additional mangling is needed on + // some targets to add a leading underscore (Mach-O) + // or byte count suffixes (x86 Windows). + self.tcx.symbol_name(instance) + } + GlobalAlloc::Static(def_id) => { + // TODO(antoyo): set the global variable as used. + // TODO(@Amanieu): Additional mangling is needed on + // some targets to add a leading underscore (Mach-O). + let instance = Instance::mono(self.tcx, def_id); + self.tcx.symbol_name(instance) + } + GlobalAlloc::Memory(_) + | GlobalAlloc::VTable(..) + | GlobalAlloc::TypeId { .. } => unreachable!(), + }; + template_str.push_str(symbol_name.name); + } + } } - GlobalAsmOperandRef::SymStatic { def_id } => { // TODO(antoyo): set the global variable as used. 
// TODO(@Amanieu): Additional mangling is needed on diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index 698f0c21d95fd..9e3a56fd41596 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -4,6 +4,7 @@ use rustc_codegen_ssa::mir::operand::OperandValue; use rustc_codegen_ssa::traits::*; use rustc_data_structures::assert_matches; use rustc_data_structures::fx::FxHashMap; +use rustc_middle::mir::interpret::{GlobalAlloc, Scalar as ConstScalar}; use rustc_middle::ty::Instance; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::{bug, span_bug}; @@ -156,11 +157,29 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { constraints.push(format!("{}", op_idx[&idx])); } } - InlineAsmOperandRef::SymFn { instance } => { - inputs.push(self.cx.get_fn(instance)); - op_idx.insert(idx, constraints.len()); - constraints.push("s".to_string()); - } + InlineAsmOperandRef::Const { value, ty: _ } => match value { + ConstScalar::Int(_) => (), + ConstScalar::Ptr(ptr, _) => { + let (prov, offset) = ptr.prov_and_relative_offset(); + assert_eq!(offset.bytes(), 0); + let global_alloc = self.tcx.global_alloc(prov.alloc_id()); + match global_alloc { + GlobalAlloc::Function { instance } => { + inputs.push(self.cx.get_fn(instance)); + op_idx.insert(idx, constraints.len()); + constraints.push("s".to_string()); + } + GlobalAlloc::Static(def_id) => { + inputs.push(self.cx.get_static(def_id)); + op_idx.insert(idx, constraints.len()); + constraints.push("s".to_string()); + } + GlobalAlloc::Memory(_) + | GlobalAlloc::VTable(..) + | GlobalAlloc::TypeId { .. 
} => unreachable!(), + } + } + }, InlineAsmOperandRef::SymStatic { def_id } => { inputs.push(self.cx.get_static(def_id)); op_idx.insert(idx, constraints.len()); @@ -204,17 +223,37 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { } } InlineAsmOperandRef::Const { value, ty } => { - // Const operands get injected directly into the template - let string = rustc_codegen_ssa::common::asm_const_to_str( - self.tcx, - span, - value, - self.layout_of(ty), - ); - template_str.push_str(&string); + match value { + ConstScalar::Int(int) => { + // Const operands get injected directly into the template + let string = rustc_codegen_ssa::common::asm_const_to_str( + self.tcx, + span, + int, + self.layout_of(ty), + ); + template_str.push_str(&string); + } + ConstScalar::Ptr(ptr, _) => { + let (prov, offset) = ptr.prov_and_relative_offset(); + assert_eq!(offset.bytes(), 0); + let global_alloc = self.tcx.global_alloc(prov.alloc_id()); + match global_alloc { + GlobalAlloc::Function { .. } | GlobalAlloc::Static(_) => { + // Only emit the raw symbol name + template_str.push_str(&format!( + "${{{}:c}}", + op_idx[&operand_idx] + )); + } + GlobalAlloc::Memory(_) + | GlobalAlloc::VTable(..) + | GlobalAlloc::TypeId { .. } => unreachable!(), + } + } + } } - InlineAsmOperandRef::SymFn { .. } - | InlineAsmOperandRef::SymStatic { .. } => { + InlineAsmOperandRef::SymStatic { .. } => { // Only emit the raw symbol name template_str.push_str(&format!("${{{}:c}}", op_idx[&operand_idx])); } @@ -409,25 +448,46 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> { use rustc_codegen_ssa::back::symbol_export::escape_symbol_name; match operands[operand_idx] { GlobalAsmOperandRef::Const { value, ty } => { - // Const operands get injected directly into the - // template. Note that we don't need to escape $ - // here unlike normal inline assembly. 
- let string = rustc_codegen_ssa::common::asm_const_to_str( - self.tcx, - span, - value, - self.layout_of(ty), - ); - template_str.push_str(&string); - } - GlobalAsmOperandRef::SymFn { instance } => { - let llval = self.get_fn(instance); - self.add_compiler_used_global(llval); - let symbol = llvm::build_string(|s| unsafe { - llvm::LLVMRustGetMangledName(llval, s); - }) - .expect("symbol is not valid UTF-8"); - template_str.push_str(&escape_symbol_name(self.tcx, &symbol, span)); + match value { + ConstScalar::Int(int) => { + // Const operands get injected directly into the + // template. Note that we don't need to escape $ + // here unlike normal inline assembly. + let string = rustc_codegen_ssa::common::asm_const_to_str( + self.tcx, + span, + int, + self.layout_of(ty), + ); + template_str.push_str(&string); + } + + ConstScalar::Ptr(ptr, _) => { + let (prov, offset) = ptr.prov_and_relative_offset(); + assert_eq!(offset.bytes(), 0); + let global_alloc = self.tcx.global_alloc(prov.alloc_id()); + let llval = match global_alloc { + GlobalAlloc::Function { instance } => self.get_fn(instance), + GlobalAlloc::Static(def_id) => self + .renamed_statics + .borrow() + .get(&def_id) + .copied() + .unwrap_or_else(|| self.get_static(def_id)), + GlobalAlloc::Memory(_) + | GlobalAlloc::VTable(..) + | GlobalAlloc::TypeId { .. 
} => unreachable!(), + }; + + self.add_compiler_used_global(llval); + let symbol = llvm::build_string(|s| unsafe { + llvm::LLVMRustGetMangledName(llval, s); + }) + .expect("symbol is not valid UTF-8"); + template_str + .push_str(&escape_symbol_name(self.tcx, &symbol, span)); + } + } } GlobalAsmOperandRef::SymStatic { def_id } => { let llval = self diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs index 2ff554a3b3637..a7343d642f672 100644 --- a/compiler/rustc_codegen_ssa/src/base.rs +++ b/compiler/rustc_codegen_ssa/src/base.rs @@ -22,7 +22,7 @@ use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile; use rustc_middle::middle::dependency_format::Dependencies; use rustc_middle::middle::exported_symbols::{self, SymbolExportKind}; use rustc_middle::middle::lang_items; -use rustc_middle::mir::interpret::{ErrorHandled, Scalar}; +use rustc_middle::mir::interpret::{CTFE_ALLOC_SALT, ErrorHandled, Scalar}; use rustc_middle::mir::mono::{CodegenUnit, CodegenUnitNameBuilder, MonoItem, MonoItemPartitions}; use rustc_middle::mir::{BinOp, ConstValue}; use rustc_middle::query::Providers; @@ -446,7 +446,13 @@ where _ => span_bug!(*op_sp, "asm sym is not a function"), }; - GlobalAsmOperandRef::SymFn { instance } + GlobalAsmOperandRef::Const { + value: Scalar::from_pointer( + cx.tcx().reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT).into(), + cx, + ), + ty: Ty::new_fn_ptr(cx.tcx(), ty.fn_sig(cx.tcx())), + } } rustc_hir::InlineAsmOperand::SymStatic { path: _, def_id } => { GlobalAsmOperandRef::SymStatic { def_id } diff --git a/compiler/rustc_codegen_ssa/src/common.rs b/compiler/rustc_codegen_ssa/src/common.rs index 80b648413a5db..dcc000f303fa2 100644 --- a/compiler/rustc_codegen_ssa/src/common.rs +++ b/compiler/rustc_codegen_ssa/src/common.rs @@ -2,9 +2,8 @@ use rustc_hir::LangItem; use rustc_hir::attrs::PeImportNameType; -use rustc_middle::mir::interpret::Scalar; use rustc_middle::ty::layout::TyAndLayout; -use 
rustc_middle::ty::{self, Instance, TyCtxt}; +use rustc_middle::ty::{self, Instance, ScalarInt, TyCtxt}; use rustc_middle::{bug, span_bug}; use rustc_session::cstore::{DllCallingConvention, DllImport}; use rustc_span::Span; @@ -150,10 +149,10 @@ pub(crate) fn shift_mask_val<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( pub fn asm_const_to_str<'tcx>( tcx: TyCtxt<'tcx>, sp: Span, - scalar: Scalar, + scalar: ScalarInt, ty_and_layout: TyAndLayout<'tcx>, ) -> String { - let value = scalar.assert_scalar_int().to_bits(ty_and_layout.size); + let value = scalar.to_bits(ty_and_layout.size); match ty_and_layout.ty.kind() { ty::Uint(_) => value.to_string(), ty::Int(int_ty) => match int_ty.normalize(tcx.sess.target.pointer_width) { diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index ce4f0ad5c569d..3258c5dd3068b 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -6,6 +6,7 @@ use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_data_structures::packed::Pu128; use rustc_hir::lang_items::LangItem; use rustc_lint_defs::builtin::TAIL_CALL_TRACK_CALLER; +use rustc_middle::mir::interpret::{CTFE_ALLOC_SALT, Scalar}; use rustc_middle::mir::{self, AssertKind, InlineAsmMacro, SwitchTargets, UnwindTerminateReason}; use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, ValidityRequirement}; use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths}; @@ -1414,7 +1415,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { args, ) .unwrap(); - InlineAsmOperandRef::SymFn { instance } + + InlineAsmOperandRef::Const { + value: Scalar::from_pointer( + bx.tcx().reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT).into(), + bx, + ), + ty: Ty::new_fn_ptr(bx.tcx(), const_.ty().fn_sig(bx.tcx())), + } } else { span_bug!(span, "invalid type for asm sym (fn)"); } diff --git a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs 
b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs index 0836922f61f54..a37e5425238db 100644 --- a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs +++ b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs @@ -1,6 +1,7 @@ use rustc_abi::{BackendRepr, Float, Integer, Primitive, RegKind}; use rustc_hir::attrs::{InstructionSetAttr, Linkage}; use rustc_hir::def_id::LOCAL_CRATE; +use rustc_middle::mir::interpret::{CTFE_ALLOC_SALT, Scalar}; use rustc_middle::mir::mono::{MonoItemData, Visibility}; use rustc_middle::mir::{self, InlineAsmOperand, START_BLOCK}; use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout}; @@ -100,7 +101,13 @@ fn inline_to_global_operand<'a, 'tcx, Cx: LayoutOf<'tcx, LayoutOfResult = TyAndL _ => bug!("asm sym is not a function"), }; - GlobalAsmOperandRef::SymFn { instance } + GlobalAsmOperandRef::Const { + value: Scalar::from_pointer( + cx.tcx().reserve_and_set_fn_alloc(instance, CTFE_ALLOC_SALT).into(), + cx, + ), + ty: Ty::new_fn_ptr(cx.tcx(), mono_type.fn_sig(cx.tcx())), + } } InlineAsmOperand::SymStatic { def_id } => { GlobalAsmOperandRef::SymStatic { def_id: *def_id } diff --git a/compiler/rustc_codegen_ssa/src/traits/asm.rs b/compiler/rustc_codegen_ssa/src/traits/asm.rs index f7bab1e07c3a9..48f42e3c970f2 100644 --- a/compiler/rustc_codegen_ssa/src/traits/asm.rs +++ b/compiler/rustc_codegen_ssa/src/traits/asm.rs @@ -31,9 +31,6 @@ pub enum InlineAsmOperandRef<'tcx, B: BackendTypes + ?Sized> { /// Type of the constant. This is needed to extract width and signedness. ty: Ty<'tcx>, }, - SymFn { - instance: Instance<'tcx>, - }, SymStatic { def_id: DefId, }, @@ -49,9 +46,6 @@ pub enum GlobalAsmOperandRef<'tcx> { /// Type of the constant. This is needed to extract width and signedness. 
ty: Ty<'tcx>, }, - SymFn { - instance: Instance<'tcx>, - }, SymStatic { def_id: DefId, }, From db6861022914a6339aa1b5e8e5a6e61fbd22d858 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Fri, 19 Dec 2025 16:05:00 +0000 Subject: [PATCH 06/10] Unify handling of `GlobalAlloc` inside backend With the previous commit, now we can see there are some code duplication for the handling of `GlobalAlloc` inside backends. Do some clean up to unify them. --- compiler/rustc_codegen_gcc/src/asm.rs | 54 ++----- compiler/rustc_codegen_gcc/src/common.rs | 119 ++++++++------- compiler/rustc_codegen_llvm/src/asm.rs | 54 ++----- compiler/rustc_codegen_llvm/src/common.rs | 141 ++++++++++-------- compiler/rustc_codegen_ssa/src/mir/place.rs | 2 +- .../rustc_codegen_ssa/src/traits/consts.rs | 14 +- compiler/rustc_codegen_ssa/src/traits/mod.rs | 2 +- 7 files changed, 190 insertions(+), 196 deletions(-) diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs index d2c2955f041b6..2109cabd230e6 100644 --- a/compiler/rustc_codegen_gcc/src/asm.rs +++ b/compiler/rustc_codegen_gcc/src/asm.rs @@ -8,7 +8,7 @@ use rustc_codegen_ssa::mir::operand::OperandValue; use rustc_codegen_ssa::mir::place::PlaceRef; use rustc_codegen_ssa::traits::{ AsmBuilderMethods, AsmCodegenMethods, BaseTypeCodegenMethods, BuilderMethods, - GlobalAsmOperandRef, InlineAsmOperandRef, + ConstCodegenMethods, GlobalAsmOperandRef, InlineAsmOperandRef, }; use rustc_middle::bug; use rustc_middle::mir::interpret::{GlobalAlloc, Scalar}; @@ -145,6 +145,9 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { // Clobbers collected from `out("explicit register") _` and `inout("explicit_reg") var => _` let mut clobbers = vec![]; + // Symbols name that needs to be inserted to asm const ptr template string. 
+ let mut const_syms = vec![]; + // We're trying to preallocate space for the template let mut constants_len = 0; @@ -405,17 +408,8 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { let (prov, offset) = ptr.prov_and_relative_offset(); assert_eq!(offset.bytes(), 0); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); - let val = match global_alloc { - GlobalAlloc::Function { instance } => { - get_fn(self.cx, instance).get_address(None) - } - GlobalAlloc::Static(def_id) => { - self.cx.get_static(def_id).get_address(None) - } - GlobalAlloc::Memory(_) - | GlobalAlloc::VTable(..) - | GlobalAlloc::TypeId { .. } => unreachable!(), - }; + let (val, sym) = self.cx.alloc_to_backend(global_alloc).unwrap(); + const_syms.push(sym.unwrap()); inputs.push(AsmInOperand { constraint: "X".into(), rust_idx, val }); } }, @@ -514,27 +508,13 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { } Scalar::Ptr(ptr, _) => { - let (prov, offset) = ptr.prov_and_relative_offset(); + let (_, offset) = ptr.prov_and_relative_offset(); assert_eq!(offset.bytes(), 0); - let global_alloc = self.tcx.global_alloc(prov.alloc_id()); - let symbol_name = match global_alloc { - GlobalAlloc::Function { instance } => { - // TODO(@Amanieu): Additional mangling is needed on - // some targets to add a leading underscore (Mach-O) - // or byte count suffixes (x86 Windows). - self.tcx.symbol_name(instance) - } - GlobalAlloc::Static(def_id) => { - // TODO(@Amanieu): Additional mangling is needed on - // some targets to add a leading underscore (Mach-O). - let instance = Instance::mono(self.tcx, def_id); - self.tcx.symbol_name(instance) - } - GlobalAlloc::Memory(_) - | GlobalAlloc::VTable(..) - | GlobalAlloc::TypeId { .. 
} => unreachable!(), - }; - template_str.push_str(symbol_name.name); + let instance = const_syms.remove(0); + // TODO(@Amanieu): Additional mangling is needed on + // some targets to add a leading underscore (Mach-O) + // or byte count suffixes (x86 Windows). + template_str.push_str(self.tcx.symbol_name(instance).name); } } } @@ -951,16 +931,14 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { // or byte count suffixes (x86 Windows). self.tcx.symbol_name(instance) } - GlobalAlloc::Static(def_id) => { + _ => { + let (_, syms) = + self.alloc_to_backend(global_alloc).unwrap(); // TODO(antoyo): set the global variable as used. // TODO(@Amanieu): Additional mangling is needed on // some targets to add a leading underscore (Mach-O). - let instance = Instance::mono(self.tcx, def_id); - self.tcx.symbol_name(instance) + self.tcx.symbol_name(syms.unwrap()) } - GlobalAlloc::Memory(_) - | GlobalAlloc::VTable(..) - | GlobalAlloc::TypeId { .. } => unreachable!(), }; template_str.push_str(symbol_name.name); } diff --git a/compiler/rustc_codegen_gcc/src/common.rs b/compiler/rustc_codegen_gcc/src/common.rs index 79cae9e028260..e8884836eb91e 100644 --- a/compiler/rustc_codegen_gcc/src/common.rs +++ b/compiler/rustc_codegen_gcc/src/common.rs @@ -6,6 +6,7 @@ use rustc_codegen_ssa::traits::{ }; use rustc_middle::mir::Mutability; use rustc_middle::mir::interpret::{GlobalAlloc, PointerArithmetic, Scalar}; +use rustc_middle::ty::Instance; use rustc_middle::ty::layout::LayoutOf; use crate::consts::const_alloc_to_gcc; @@ -114,7 +115,7 @@ pub fn type_is_pointer(typ: Type<'_>) -> bool { typ.get_pointee().is_some() } -impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> { +impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { fn const_null(&self, typ: Type<'gcc>) -> RValue<'gcc> { if type_is_pointer(typ) { self.context.new_null(typ) } else { self.const_int(typ, 0) } } @@ -225,6 +226,63 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 
'tcx> { None } + fn alloc_to_backend( + &self, + global_alloc: GlobalAlloc<'tcx>, + ) -> Result<(RValue<'gcc>, Option>), u64> { + let alloc = match global_alloc { + GlobalAlloc::Function { instance, .. } => { + return Ok((self.get_fn_addr(instance), Some(instance))); + } + GlobalAlloc::Static(def_id) => { + assert!(self.tcx.is_static(def_id)); + return Ok(( + self.get_static(def_id).get_address(None), + Some(Instance::mono(self.tcx, def_id)), + )); + } + GlobalAlloc::TypeId { .. } => { + // Drop the provenance, the offset contains the bytes of the hash, so + // just return 0 as base address. + return Err(0); + } + + GlobalAlloc::Memory(alloc) => { + if alloc.inner().len() == 0 { + // For ZSTs directly codegen an aligned pointer. + // This avoids generating a zero-sized constant value and actually needing a + // real address at runtime. + return Err(alloc.inner().align.bytes()); + } + + alloc + } + + GlobalAlloc::VTable(ty, dyn_ty) => { + self.tcx + .global_alloc(self.tcx.vtable_allocation(( + ty, + dyn_ty.principal().map(|principal| { + self.tcx.instantiate_bound_regions_with_erased(principal) + }), + ))) + .unwrap_memory() + } + }; + + let value = match alloc.inner().mutability { + Mutability::Mut => { + self.static_addr_of_mut(const_alloc_to_gcc(self, alloc), alloc.inner().align, None) + } + _ => self.static_addr_of(alloc, None), + }; + if !self.sess().fewer_names() { + // TODO(antoyo): set value name. + } + + Ok((value, None)) + } + fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, ty: Type<'gcc>) -> RValue<'gcc> { let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() }; match cv { @@ -246,57 +304,16 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> { Scalar::Ptr(ptr, _size) => { let (prov, offset) = ptr.prov_and_relative_offset(); let alloc_id = prov.alloc_id(); - let base_addr = match self.tcx.global_alloc(alloc_id) { - GlobalAlloc::Memory(alloc) => { - // For ZSTs directly codegen an aligned pointer. 
- // This avoids generating a zero-sized constant value and actually needing a - // real address at runtime. - if alloc.inner().len() == 0 { - let val = alloc.inner().align.bytes().wrapping_add(offset.bytes()); - let val = self.const_usize(self.tcx.truncate_to_target_usize(val)); - return if matches!(layout.primitive(), Pointer(_)) { - self.context.new_cast(None, val, ty) - } else { - self.const_bitcast(val, ty) - }; - } - - let value = match alloc.inner().mutability { - Mutability::Mut => self.static_addr_of_mut( - const_alloc_to_gcc(self, alloc), - alloc.inner().align, - None, - ), - _ => self.static_addr_of(alloc, None), + let base_addr = match self.alloc_to_backend(self.tcx.global_alloc(alloc_id)) { + Ok((base_addr, _)) => base_addr, + Err(base_addr) => { + let val = base_addr.wrapping_add(offset.bytes()); + let val = self.const_usize(self.tcx.truncate_to_target_usize(val)); + return if matches!(layout.primitive(), Pointer(_)) { + self.context.new_cast(None, val, ty) + } else { + self.const_bitcast(val, ty) }; - if !self.sess().fewer_names() { - // TODO(antoyo): set value name. - } - value - } - GlobalAlloc::Function { instance, .. } => self.get_fn_addr(instance), - GlobalAlloc::VTable(ty, dyn_ty) => { - let alloc = self - .tcx - .global_alloc(self.tcx.vtable_allocation(( - ty, - dyn_ty.principal().map(|principal| { - self.tcx.instantiate_bound_regions_with_erased(principal) - }), - ))) - .unwrap_memory(); - self.static_addr_of(alloc, None) - } - GlobalAlloc::TypeId { .. } => { - let val = self.const_usize(offset.bytes()); - // This is still a variable of pointer type, even though we only use the provenance - // of that pointer in CTFE and Miri. But to make LLVM's type system happy, - // we need an int-to-ptr cast here (it doesn't matter at all which provenance that picks). 
- return self.context.new_cast(None, val, ty); - } - GlobalAlloc::Static(def_id) => { - assert!(self.tcx.is_static(def_id)); - self.get_static(def_id).get_address(None) } }; let ptr_type = base_addr.get_type(); diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index 9e3a56fd41596..e28abeebe9117 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -4,7 +4,7 @@ use rustc_codegen_ssa::mir::operand::OperandValue; use rustc_codegen_ssa::traits::*; use rustc_data_structures::assert_matches; use rustc_data_structures::fx::FxHashMap; -use rustc_middle::mir::interpret::{GlobalAlloc, Scalar as ConstScalar}; +use rustc_middle::mir::interpret::Scalar as ConstScalar; use rustc_middle::ty::Instance; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::{bug, span_bug}; @@ -163,21 +163,10 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { let (prov, offset) = ptr.prov_and_relative_offset(); assert_eq!(offset.bytes(), 0); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); - match global_alloc { - GlobalAlloc::Function { instance } => { - inputs.push(self.cx.get_fn(instance)); - op_idx.insert(idx, constraints.len()); - constraints.push("s".to_string()); - } - GlobalAlloc::Static(def_id) => { - inputs.push(self.cx.get_static(def_id)); - op_idx.insert(idx, constraints.len()); - constraints.push("s".to_string()); - } - GlobalAlloc::Memory(_) - | GlobalAlloc::VTable(..) - | GlobalAlloc::TypeId { .. 
} => unreachable!(), - } + let (value, _) = self.cx.alloc_to_backend(global_alloc).unwrap(); + inputs.push(value); + op_idx.insert(idx, constraints.len()); + constraints.push("s".to_string()); } }, InlineAsmOperandRef::SymStatic { def_id } => { @@ -235,21 +224,12 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { template_str.push_str(&string); } ConstScalar::Ptr(ptr, _) => { - let (prov, offset) = ptr.prov_and_relative_offset(); + let (_, offset) = ptr.prov_and_relative_offset(); assert_eq!(offset.bytes(), 0); - let global_alloc = self.tcx.global_alloc(prov.alloc_id()); - match global_alloc { - GlobalAlloc::Function { .. } | GlobalAlloc::Static(_) => { - // Only emit the raw symbol name - template_str.push_str(&format!( - "${{{}:c}}", - op_idx[&operand_idx] - )); - } - GlobalAlloc::Memory(_) - | GlobalAlloc::VTable(..) - | GlobalAlloc::TypeId { .. } => unreachable!(), - } + + // Only emit the raw symbol name + template_str + .push_str(&format!("${{{}:c}}", op_idx[&operand_idx])); } } } @@ -466,18 +446,8 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> { let (prov, offset) = ptr.prov_and_relative_offset(); assert_eq!(offset.bytes(), 0); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); - let llval = match global_alloc { - GlobalAlloc::Function { instance } => self.get_fn(instance), - GlobalAlloc::Static(def_id) => self - .renamed_statics - .borrow() - .get(&def_id) - .copied() - .unwrap_or_else(|| self.get_static(def_id)), - GlobalAlloc::Memory(_) - | GlobalAlloc::VTable(..) - | GlobalAlloc::TypeId { .. 
} => unreachable!(), - }; + let (llval, sym) = self.alloc_to_backend(global_alloc).unwrap(); + assert!(sym.is_some()); self.add_compiler_used_global(llval); let symbol = llvm::build_string(|s| unsafe { diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs index a134e97cc8915..4fa37b2656654 100644 --- a/compiler/rustc_codegen_llvm/src/common.rs +++ b/compiler/rustc_codegen_llvm/src/common.rs @@ -13,7 +13,7 @@ use rustc_hashes::Hash128; use rustc_hir::def_id::DefId; use rustc_middle::bug; use rustc_middle::mir::interpret::{GlobalAlloc, PointerArithmetic, Scalar}; -use rustc_middle::ty::TyCtxt; +use rustc_middle::ty::{Instance, TyCtxt}; use rustc_session::cstore::DllImport; use tracing::debug; @@ -130,7 +130,7 @@ impl<'ll, CX: Borrow>> GenericCx<'ll, CX> { } } -impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { +impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { fn const_null(&self, t: &'ll Type) -> &'ll Value { unsafe { llvm::LLVMConstNull(t) } } @@ -264,6 +264,74 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { }) } + fn alloc_to_backend( + &self, + global_alloc: GlobalAlloc<'tcx>, + ) -> Result<(Self::Value, Option>), u64> { + let alloc = match global_alloc { + GlobalAlloc::Function { instance, .. } => { + return Ok((self.get_fn_addr(instance), Some(instance))); + } + GlobalAlloc::Static(def_id) => { + assert!(self.tcx.is_static(def_id)); + assert!(!self.tcx.is_thread_local_static(def_id)); + return Ok(( + // `alloc_to_backend` might be called by `global_asm!` codegen. In which case + // `global_asm!` would need to find the renamed statics to use for symbol name. + self.renamed_statics + .borrow() + .get(&def_id) + .copied() + .unwrap_or_else(|| self.get_static(def_id)), + Some(Instance::mono(self.tcx, def_id)), + )); + } + GlobalAlloc::TypeId { .. } => { + // Drop the provenance, the offset contains the bytes of the hash, so + // just return 0 as base address. 
+ return Err(0); + } + + GlobalAlloc::Memory(alloc) => { + if alloc.inner().len() == 0 { + // For ZSTs directly codegen an aligned pointer. + // This avoids generating a zero-sized constant value and actually needing a + // real address at runtime. + return Err(alloc.inner().align.bytes()); + } + + alloc + } + GlobalAlloc::VTable(ty, dyn_ty) => { + self.tcx + .global_alloc(self.tcx.vtable_allocation(( + ty, + dyn_ty.principal().map(|principal| { + self.tcx.instantiate_bound_regions_with_erased(principal) + }), + ))) + .unwrap_memory() + } + }; + + let init = const_alloc_to_llvm(self, alloc.inner(), /*static*/ false); + let alloc = alloc.inner(); + let value = match alloc.mutability { + Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None), + _ => self.static_addr_of_impl(init, alloc.align, None), + }; + if !self.sess().fewer_names() && llvm::get_value_name(value).is_empty() { + let hash = self.tcx.with_stable_hashing_context(|mut hcx| { + let mut hasher = StableHasher::new(); + alloc.hash_stable(&mut hcx, &mut hasher); + hasher.finish::() + }); + llvm::set_value_name(value, format!("alloc_{hash:032x}").as_bytes()); + } + + Ok((value, None)) + } + fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: &'ll Type) -> &'ll Value { let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() }; match cv { @@ -279,68 +347,19 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { Scalar::Ptr(ptr, _size) => { let (prov, offset) = ptr.prov_and_relative_offset(); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); - let base_addr = match global_alloc { - GlobalAlloc::Memory(alloc) => { - // For ZSTs directly codegen an aligned pointer. - // This avoids generating a zero-sized constant value and actually needing a - // real address at runtime. 
- if alloc.inner().len() == 0 { - let val = alloc.inner().align.bytes().wrapping_add(offset.bytes()); - let llval = self.const_usize(self.tcx.truncate_to_target_usize(val)); - return if matches!(layout.primitive(), Pointer(_)) { - unsafe { llvm::LLVMConstIntToPtr(llval, llty) } - } else { - self.const_bitcast(llval, llty) - }; + let base_addr_space = global_alloc.address_space(self); + let base_addr = match self.alloc_to_backend(global_alloc) { + Ok((base_addr, _)) => base_addr, + Err(base_addr) => { + let val = base_addr.wrapping_add(offset.bytes()); + let llval = self.const_usize(self.tcx.truncate_to_target_usize(val)); + return if matches!(layout.primitive(), Pointer(_)) { + unsafe { llvm::LLVMConstIntToPtr(llval, llty) } } else { - let init = - const_alloc_to_llvm(self, alloc.inner(), /*static*/ false); - let alloc = alloc.inner(); - let value = match alloc.mutability { - Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None), - _ => self.static_addr_of_impl(init, alloc.align, None), - }; - if !self.sess().fewer_names() && llvm::get_value_name(value).is_empty() - { - let hash = self.tcx.with_stable_hashing_context(|mut hcx| { - let mut hasher = StableHasher::new(); - alloc.hash_stable(&mut hcx, &mut hasher); - hasher.finish::() - }); - llvm::set_value_name( - value, - format!("alloc_{hash:032x}").as_bytes(), - ); - } - value - } - } - GlobalAlloc::Function { instance, .. 
} => self.get_fn_addr(instance), - GlobalAlloc::VTable(ty, dyn_ty) => { - let alloc = self - .tcx - .global_alloc(self.tcx.vtable_allocation(( - ty, - dyn_ty.principal().map(|principal| { - self.tcx.instantiate_bound_regions_with_erased(principal) - }), - ))) - .unwrap_memory(); - let init = const_alloc_to_llvm(self, alloc.inner(), /*static*/ false); - self.static_addr_of_impl(init, alloc.inner().align, None) - } - GlobalAlloc::Static(def_id) => { - assert!(self.tcx.is_static(def_id)); - assert!(!self.tcx.is_thread_local_static(def_id)); - self.get_static(def_id) - } - GlobalAlloc::TypeId { .. } => { - // Drop the provenance, the offset contains the bytes of the hash - let llval = self.const_usize(offset.bytes()); - return unsafe { llvm::LLVMConstIntToPtr(llval, llty) }; + self.const_bitcast(llval, llty) + }; } }; - let base_addr_space = global_alloc.address_space(self); let llval = unsafe { llvm::LLVMConstInBoundsGEP2( self.type_i8(), diff --git a/compiler/rustc_codegen_ssa/src/mir/place.rs b/compiler/rustc_codegen_ssa/src/mir/place.rs index d62e622b6fed3..f2387595a37be 100644 --- a/compiler/rustc_codegen_ssa/src/mir/place.rs +++ b/compiler/rustc_codegen_ssa/src/mir/place.rs @@ -138,7 +138,7 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> { Self::alloca(bx, ptr_layout) } - pub fn len>(&self, cx: &Cx) -> V { + pub fn len>(&self, cx: &Cx) -> V { if let FieldsShape::Array { count, .. 
} = self.layout.fields { if self.layout.is_unsized() { assert_eq!(count, 0); diff --git a/compiler/rustc_codegen_ssa/src/traits/consts.rs b/compiler/rustc_codegen_ssa/src/traits/consts.rs index 4178a9742e268..4064667e087bd 100644 --- a/compiler/rustc_codegen_ssa/src/traits/consts.rs +++ b/compiler/rustc_codegen_ssa/src/traits/consts.rs @@ -1,9 +1,10 @@ use rustc_abi as abi; -use rustc_middle::mir::interpret::Scalar; +use rustc_middle::mir::interpret::{GlobalAlloc, Scalar}; +use rustc_middle::ty::Instance; use super::BackendTypes; -pub trait ConstCodegenMethods: BackendTypes { +pub trait ConstCodegenMethods<'tcx>: BackendTypes { // Constant constructors fn const_null(&self, t: Self::Type) -> Self::Value; /// Generate an uninitialized value (matching uninitialized memory in MIR). @@ -37,6 +38,15 @@ pub trait ConstCodegenMethods: BackendTypes { fn const_to_opt_uint(&self, v: Self::Value) -> Option; fn const_to_opt_u128(&self, v: Self::Value, sign_ext: bool) -> Option; + /// Turn a `GlobalAlloc` into a backend global, return the value and instance that is used to + /// generate the symbol name, if any. + /// + /// If the `GlobalAlloc` should not be mapped to a global, but absolute address should be used, + /// an integer is returned as `Err` instead. 
+ fn alloc_to_backend( + &self, + global_alloc: GlobalAlloc<'tcx>, + ) -> Result<(Self::Value, Option>), u64>; fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: Self::Type) -> Self::Value; fn const_ptr_byte_offset(&self, val: Self::Value, offset: abi::Size) -> Self::Value; diff --git a/compiler/rustc_codegen_ssa/src/traits/mod.rs b/compiler/rustc_codegen_ssa/src/traits/mod.rs index f46d07ea5008e..3694fd4f4ea17 100644 --- a/compiler/rustc_codegen_ssa/src/traits/mod.rs +++ b/compiler/rustc_codegen_ssa/src/traits/mod.rs @@ -55,7 +55,7 @@ pub trait CodegenObject = Copy + fmt::Debug; pub trait CodegenMethods<'tcx> = LayoutOf<'tcx, LayoutOfResult = TyAndLayout<'tcx>> + FnAbiOf<'tcx, FnAbiOfResult = &'tcx FnAbi<'tcx, Ty<'tcx>>> + TypeCodegenMethods<'tcx> - + ConstCodegenMethods + + ConstCodegenMethods<'tcx> + StaticCodegenMethods + DebugInfoCodegenMethods<'tcx> + AsmCodegenMethods<'tcx> From 8421a5a99cb6dc9efc288650e2cb2543ce8600c1 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Thu, 18 Dec 2025 20:00:30 +0000 Subject: [PATCH 07/10] Handle pointers with offset for asm const --- .../rustc_codegen_cranelift/src/global_asm.rs | 10 ++++++-- compiler/rustc_codegen_gcc/src/asm.rs | 21 +++++++++++++---- compiler/rustc_codegen_llvm/src/asm.rs | 23 ++++++++++++++----- 3 files changed, 41 insertions(+), 13 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/src/global_asm.rs b/compiler/rustc_codegen_cranelift/src/global_asm.rs index 306b77246a3f7..439efa6bce5be 100644 --- a/compiler/rustc_codegen_cranelift/src/global_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/global_asm.rs @@ -1,6 +1,7 @@ //! The AOT driver uses [`cranelift_object`] to write object files suitable for linking into a //! standalone executable. 
+use std::fmt::Write as _; use std::io::Write; use std::path::PathBuf; use std::process::{Command, Stdio}; @@ -8,7 +9,7 @@ use std::sync::Arc; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_codegen_ssa::traits::{AsmCodegenMethods, GlobalAsmOperandRef}; -use rustc_middle::mir::interpret::{GlobalAlloc, Scalar as ConstScalar}; +use rustc_middle::mir::interpret::{GlobalAlloc, PointerArithmetic, Scalar as ConstScalar}; use rustc_middle::ty::TyCtxt; use rustc_middle::ty::layout::{ FnAbiError, FnAbiOfHelpers, FnAbiRequest, HasTyCtxt, HasTypingEnv, LayoutError, LayoutOfHelpers, @@ -123,7 +124,6 @@ fn codegen_global_asm_inner<'tcx>( ConstScalar::Ptr(ptr, _) => { let (prov, offset) = ptr.prov_and_relative_offset(); - assert_eq!(offset.bytes(), 0); let global_alloc = tcx.global_alloc(prov.alloc_id()); let symbol_name = match global_alloc { GlobalAlloc::Function { instance } => { @@ -146,11 +146,17 @@ fn codegen_global_asm_inner<'tcx>( | GlobalAlloc::VTable(..) | GlobalAlloc::TypeId { .. 
} => unreachable!(), }; + global_asm.push_str(&escape_symbol_name( tcx, symbol_name.name, span, )); + + if offset != Size::ZERO { + let offset = tcx.sign_extend_to_target_isize(offset.bytes()); + write!(global_asm, "{offset:+}").unwrap(); + } } } } diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs index 2109cabd230e6..94e1256ce2bd8 100644 --- a/compiler/rustc_codegen_gcc/src/asm.rs +++ b/compiler/rustc_codegen_gcc/src/asm.rs @@ -1,8 +1,10 @@ // cSpell:ignoreRegExp [afkspqvwy]reg use std::borrow::Cow; +use std::fmt::Write; use gccjit::{LValue, RValue, ToRValue, Type}; +use rustc_abi::Size; use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_codegen_ssa::mir::operand::OperandValue; use rustc_codegen_ssa::mir::place::PlaceRef; @@ -11,7 +13,7 @@ use rustc_codegen_ssa::traits::{ ConstCodegenMethods, GlobalAsmOperandRef, InlineAsmOperandRef, }; use rustc_middle::bug; -use rustc_middle::mir::interpret::{GlobalAlloc, Scalar}; +use rustc_middle::mir::interpret::{GlobalAlloc, PointerArithmetic, Scalar}; use rustc_middle::ty::Instance; use rustc_middle::ty::layout::LayoutOf; use rustc_span::Span; @@ -405,8 +407,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { InlineAsmOperandRef::Const { value, ty: _ } => match value { Scalar::Int(_) => (), Scalar::Ptr(ptr, _) => { - let (prov, offset) = ptr.prov_and_relative_offset(); - assert_eq!(offset.bytes(), 0); + let (prov, _) = ptr.prov_and_relative_offset(); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); let (val, sym) = self.cx.alloc_to_backend(global_alloc).unwrap(); const_syms.push(sym.unwrap()); @@ -509,12 +510,17 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { Scalar::Ptr(ptr, _) => { let (_, offset) = ptr.prov_and_relative_offset(); - assert_eq!(offset.bytes(), 0); let instance = const_syms.remove(0); // TODO(@Amanieu): Additional mangling is needed on // some targets to add a leading 
underscore (Mach-O) // or byte count suffixes (x86 Windows). template_str.push_str(self.tcx.symbol_name(instance).name); + + if offset != Size::ZERO { + let offset = + self.sign_extend_to_target_isize(offset.bytes()); + write!(template_str, "{offset:+}").unwrap(); + } } } } @@ -920,7 +926,6 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { Scalar::Ptr(ptr, _) => { let (prov, offset) = ptr.prov_and_relative_offset(); - assert_eq!(offset.bytes(), 0); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); let symbol_name = match global_alloc { GlobalAlloc::Function { instance } => { @@ -941,6 +946,12 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { } }; template_str.push_str(symbol_name.name); + + if offset != Size::ZERO { + let offset = + self.sign_extend_to_target_isize(offset.bytes()); + write!(template_str, "{offset:+}").unwrap(); + } } } } diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index e28abeebe9117..cb3fc2dfe27f8 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -1,10 +1,12 @@ -use rustc_abi::{BackendRepr, Float, Integer, Primitive, Scalar}; +use std::fmt::Write; + +use rustc_abi::{BackendRepr, Float, Integer, Primitive, Scalar, Size}; use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece}; use rustc_codegen_ssa::mir::operand::OperandValue; use rustc_codegen_ssa::traits::*; use rustc_data_structures::assert_matches; use rustc_data_structures::fx::FxHashMap; -use rustc_middle::mir::interpret::Scalar as ConstScalar; +use rustc_middle::mir::interpret::{PointerArithmetic, Scalar as ConstScalar}; use rustc_middle::ty::Instance; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::{bug, span_bug}; @@ -160,8 +162,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { InlineAsmOperandRef::Const { value, ty: _ } => match value { ConstScalar::Int(_) => (), ConstScalar::Ptr(ptr, _) => { - let 
(prov, offset) = ptr.prov_and_relative_offset(); - assert_eq!(offset.bytes(), 0); + let (prov, _) = ptr.prov_and_relative_offset(); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); let (value, _) = self.cx.alloc_to_backend(global_alloc).unwrap(); inputs.push(value); @@ -225,11 +226,16 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { } ConstScalar::Ptr(ptr, _) => { let (_, offset) = ptr.prov_and_relative_offset(); - assert_eq!(offset.bytes(), 0); // Only emit the raw symbol name template_str .push_str(&format!("${{{}:c}}", op_idx[&operand_idx])); + + if offset != Size::ZERO { + let offset = + self.sign_extend_to_target_isize(offset.bytes()); + write!(template_str, "{offset:+}").unwrap(); + } } } } @@ -444,7 +450,6 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> { ConstScalar::Ptr(ptr, _) => { let (prov, offset) = ptr.prov_and_relative_offset(); - assert_eq!(offset.bytes(), 0); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); let (llval, sym) = self.alloc_to_backend(global_alloc).unwrap(); assert!(sym.is_some()); @@ -456,6 +461,12 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> { .expect("symbol is not valid UTF-8"); template_str .push_str(&escape_symbol_name(self.tcx, &symbol, span)); + + if offset != Size::ZERO { + let offset = + self.sign_extend_to_target_isize(offset.bytes()); + write!(template_str, "{offset:+}").unwrap(); + } } } } From 63595c6a8449e703a6ade3e2d9361e51292b8dd3 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Thu, 18 Dec 2025 20:45:34 +0000 Subject: [PATCH 08/10] Support codegen of asm const pointers without provenance CTFE pointers created via type ID, `without_provenance` or pointers to const ZSTs can now be codegenned with all 3 backends. These pointers are generated in the same way as integers. 
--- .../rustc_codegen_cranelift/src/global_asm.rs | 1 - .../rustc_codegen_cranelift/src/inline_asm.rs | 1 - compiler/rustc_codegen_gcc/src/asm.rs | 6 ++-- compiler/rustc_codegen_llvm/src/asm.rs | 4 +-- compiler/rustc_codegen_ssa/src/base.rs | 5 ++- compiler/rustc_codegen_ssa/src/common.rs | 33 +++++++++++++++++-- compiler/rustc_codegen_ssa/src/mir/block.rs | 5 ++- .../rustc_codegen_ssa/src/mir/naked_asm.rs | 6 +++- 8 files changed, 46 insertions(+), 15 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/src/global_asm.rs b/compiler/rustc_codegen_cranelift/src/global_asm.rs index 439efa6bce5be..2ef7bfdc652ef 100644 --- a/compiler/rustc_codegen_cranelift/src/global_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/global_asm.rs @@ -115,7 +115,6 @@ fn codegen_global_asm_inner<'tcx>( ConstScalar::Int(int) => { let string = rustc_codegen_ssa::common::asm_const_to_str( tcx, - span, int, FullyMonomorphizedLayoutCx(tcx).layout_of(ty), ); diff --git a/compiler/rustc_codegen_cranelift/src/inline_asm.rs b/compiler/rustc_codegen_cranelift/src/inline_asm.rs index 6011912db089a..e8a1508d2b596 100644 --- a/compiler/rustc_codegen_cranelift/src/inline_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/inline_asm.rs @@ -104,7 +104,6 @@ pub(crate) fn codegen_inline_asm_terminator<'tcx>( let value = rustc_codegen_ssa::common::asm_const_to_str( fx.tcx, - span, scalar.assert_scalar_int(), fx.layout_of(ty), ); diff --git a/compiler/rustc_codegen_gcc/src/asm.rs b/compiler/rustc_codegen_gcc/src/asm.rs index 94e1256ce2bd8..fe8025fa3efe5 100644 --- a/compiler/rustc_codegen_gcc/src/asm.rs +++ b/compiler/rustc_codegen_gcc/src/asm.rs @@ -453,7 +453,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { template_str.push_str(escaped_char); } } - InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span } => { + InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span: _ } => { let mut push_to_template = |modifier, gcc_idx| { use std::fmt::Write; @@ 
-501,7 +501,6 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { // Const operands get injected directly into the template let string = rustc_codegen_ssa::common::asm_const_to_str( self.tcx, - span, int, self.layout_of(ty), ); @@ -907,7 +906,7 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { .unwrap_or(string.len()); } } - InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span } => { + InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => { match operands[operand_idx] { GlobalAsmOperandRef::Const { value, ty } => { match value { @@ -917,7 +916,6 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { // here unlike normal inline assembly. let string = rustc_codegen_ssa::common::asm_const_to_str( self.tcx, - span, int, self.layout_of(ty), ); diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index cb3fc2dfe27f8..838f18eccc337 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -197,7 +197,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { template_str.push_str(s) } } - InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span } => { + InlineAsmTemplatePiece::Placeholder { operand_idx, modifier, span: _ } => { match operands[operand_idx] { InlineAsmOperandRef::In { reg, .. } | InlineAsmOperandRef::Out { reg, .. } @@ -218,7 +218,6 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { // Const operands get injected directly into the template let string = rustc_codegen_ssa::common::asm_const_to_str( self.tcx, - span, int, self.layout_of(ty), ); @@ -441,7 +440,6 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> { // here unlike normal inline assembly. 
let string = rustc_codegen_ssa::common::asm_const_to_str( self.tcx, - span, int, self.layout_of(ty), ); diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs index a7343d642f672..2a67b0e64b1ff 100644 --- a/compiler/rustc_codegen_ssa/src/base.rs +++ b/compiler/rustc_codegen_ssa/src/base.rs @@ -417,7 +417,10 @@ where const_value ) }; - GlobalAsmOperandRef::Const { value: scalar, ty } + GlobalAsmOperandRef::Const { + value: common::asm_const_ptr_clean(cx.tcx(), scalar), + ty, + } } Err(ErrorHandled::Reported { .. }) => { // An error has already been reported and diff --git a/compiler/rustc_codegen_ssa/src/common.rs b/compiler/rustc_codegen_ssa/src/common.rs index dcc000f303fa2..a56f51606b3dd 100644 --- a/compiler/rustc_codegen_ssa/src/common.rs +++ b/compiler/rustc_codegen_ssa/src/common.rs @@ -2,9 +2,10 @@ use rustc_hir::LangItem; use rustc_hir::attrs::PeImportNameType; +use rustc_middle::bug; +use rustc_middle::mir::interpret::{GlobalAlloc, PointerArithmetic, Scalar}; use rustc_middle::ty::layout::TyAndLayout; use rustc_middle::ty::{self, Instance, ScalarInt, TyCtxt}; -use rustc_middle::{bug, span_bug}; use rustc_session::cstore::{DllCallingConvention, DllImport}; use rustc_span::Span; use rustc_target::spec::{Abi, Env, Os, Target}; @@ -148,7 +149,6 @@ pub(crate) fn shift_mask_val<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( pub fn asm_const_to_str<'tcx>( tcx: TyCtxt<'tcx>, - sp: Span, scalar: ScalarInt, ty_and_layout: TyAndLayout<'tcx>, ) -> String { @@ -163,7 +163,34 @@ pub fn asm_const_to_str<'tcx>( ty::IntTy::I128 => (value as i128).to_string(), ty::IntTy::Isize => unreachable!(), }, - _ => span_bug!(sp, "asm const has bad type {}", ty_and_layout.ty), + // For unsigned integers or pointers without provenance, just print the unsigned value + _ => value.to_string(), + } +} + +/// "Clean" a const pointer by removing values where the resulting ASM will not be +/// ` + `. +/// +/// These values are converted to `ScalarInt`. 
+pub fn asm_const_ptr_clean<'tcx>(tcx: TyCtxt<'tcx>, scalar: Scalar) -> Scalar { + let Scalar::Ptr(ptr, _) = scalar else { + return scalar; + }; + let (prov, offset) = ptr.prov_and_relative_offset(); + let global_alloc = tcx.global_alloc(prov.alloc_id()); + match global_alloc { + GlobalAlloc::TypeId { .. } => { + // `TypeId` provenances are not a thing in codegen. Just erase and replace with scalar offset. + Scalar::from_u64(offset.bytes()) + } + GlobalAlloc::Memory(alloc) if alloc.inner().len() == 0 => { + // ZST const allocations don't actually get global defined when lowered. + // Turn them into integer without provenances now. + let val = alloc.inner().align.bytes().wrapping_add(offset.bytes()); + Scalar::from_target_usize(tcx.truncate_to_target_usize(val), &tcx) + } + // Other types of `GlobalAlloc` are fine. + _ => scalar, } } diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 3258c5dd3068b..6b4deaa5205f0 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -1403,7 +1403,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { const_value ) }; - InlineAsmOperandRef::Const { value: scalar, ty: value.ty() } + InlineAsmOperandRef::Const { + value: common::asm_const_ptr_clean(bx.tcx(), scalar), + ty: value.ty(), + } } mir::InlineAsmOperand::SymFn { ref value } => { let const_ = self.monomorphize(value.const_); diff --git a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs index a37e5425238db..78849bcf9f3d4 100644 --- a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs +++ b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs @@ -11,6 +11,7 @@ use rustc_span::sym; use rustc_target::callconv::{ArgAbi, FnAbi, PassMode}; use rustc_target::spec::{Arch, BinaryFormat}; +use crate::common; use crate::mir::AsmCodegenMethods; use crate::traits::GlobalAsmOperandRef; @@ -85,7 +86,10 @@ fn 
inline_to_global_operand<'a, 'tcx, Cx: LayoutOf<'tcx, LayoutOfResult = TyAndL ) }; - GlobalAsmOperandRef::Const { value: scalar, ty: mono_type } + GlobalAsmOperandRef::Const { + value: common::asm_const_ptr_clean(cx.tcx(), scalar), + ty: mono_type, + } } InlineAsmOperand::SymFn { value } => { let mono_type = instance.instantiate_mir_and_normalize_erasing_regions( From 2f3732fc24db5248486127965d7a1f0f00f03d82 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Fri, 19 Dec 2025 17:10:58 +0000 Subject: [PATCH 09/10] Generate unique symbol names if const pointers refer to promoted static --- .../rustc_codegen_cranelift/src/global_asm.rs | 2 +- compiler/rustc_codegen_gcc/src/asm.rs | 13 +++++++------ compiler/rustc_codegen_gcc/src/common.rs | 17 +++++++++++++++-- compiler/rustc_codegen_llvm/src/asm.rs | 11 ++++++----- compiler/rustc_codegen_llvm/src/common.rs | 19 ++++++++++++++++++- compiler/rustc_codegen_ssa/src/base.rs | 9 ++++++++- compiler/rustc_codegen_ssa/src/mir/block.rs | 11 ++++++++++- .../rustc_codegen_ssa/src/mir/naked_asm.rs | 9 ++++++++- compiler/rustc_codegen_ssa/src/traits/asm.rs | 8 ++++++++ .../rustc_codegen_ssa/src/traits/consts.rs | 4 ++++ 10 files changed, 85 insertions(+), 18 deletions(-) diff --git a/compiler/rustc_codegen_cranelift/src/global_asm.rs b/compiler/rustc_codegen_cranelift/src/global_asm.rs index 2ef7bfdc652ef..346264d356460 100644 --- a/compiler/rustc_codegen_cranelift/src/global_asm.rs +++ b/compiler/rustc_codegen_cranelift/src/global_asm.rs @@ -110,7 +110,7 @@ fn codegen_global_asm_inner<'tcx>( InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span } => { use rustc_codegen_ssa::back::symbol_export::escape_symbol_name; match operands[operand_idx] { - GlobalAsmOperandRef::Const { value, ty } => { + GlobalAsmOperandRef::Const { value, ty, instance: _ } => { match value { ConstScalar::Int(int) => { let string = rustc_codegen_ssa::common::asm_const_to_str( diff --git a/compiler/rustc_codegen_gcc/src/asm.rs 
b/compiler/rustc_codegen_gcc/src/asm.rs index fe8025fa3efe5..d70c1db3765af 100644 --- a/compiler/rustc_codegen_gcc/src/asm.rs +++ b/compiler/rustc_codegen_gcc/src/asm.rs @@ -404,12 +404,12 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { // processed in the previous pass } - InlineAsmOperandRef::Const { value, ty: _ } => match value { + InlineAsmOperandRef::Const { value, ty: _, instance } => match value { Scalar::Int(_) => (), Scalar::Ptr(ptr, _) => { let (prov, _) = ptr.prov_and_relative_offset(); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); - let (val, sym) = self.cx.alloc_to_backend(global_alloc).unwrap(); + let (val, sym) = self.cx.alloc_to_backend(global_alloc, instance).unwrap(); const_syms.push(sym.unwrap()); inputs.push(AsmInOperand { constraint: "X".into(), rust_idx, val }); } @@ -495,7 +495,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { push_to_template(modifier, gcc_index); } - InlineAsmOperandRef::Const { value, ty } => { + InlineAsmOperandRef::Const { value, ty, instance: _ } => { match value { Scalar::Int(int) => { // Const operands get injected directly into the template @@ -908,7 +908,7 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { } InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => { match operands[operand_idx] { - GlobalAsmOperandRef::Const { value, ty } => { + GlobalAsmOperandRef::Const { value, ty, instance } => { match value { Scalar::Int(int) => { // Const operands get injected directly into the @@ -935,8 +935,9 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { self.tcx.symbol_name(instance) } _ => { - let (_, syms) = - self.alloc_to_backend(global_alloc).unwrap(); + let (_, syms) = self + .alloc_to_backend(global_alloc, instance) + .unwrap(); // TODO(antoyo): set the global variable as used. 
// TODO(@Amanieu): Additional mangling is needed on // some targets to add a leading underscore (Mach-O). diff --git a/compiler/rustc_codegen_gcc/src/common.rs b/compiler/rustc_codegen_gcc/src/common.rs index e8884836eb91e..671f9413d71de 100644 --- a/compiler/rustc_codegen_gcc/src/common.rs +++ b/compiler/rustc_codegen_gcc/src/common.rs @@ -1,4 +1,4 @@ -use gccjit::{LValue, RValue, ToRValue, Type}; +use gccjit::{GlobalKind, LValue, RValue, ToRValue, Type}; use rustc_abi::Primitive::Pointer; use rustc_abi::{self as abi, HasDataLayout}; use rustc_codegen_ssa::traits::{ @@ -229,6 +229,7 @@ impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { fn alloc_to_backend( &self, global_alloc: GlobalAlloc<'tcx>, + name_hint: Option>, ) -> Result<(RValue<'gcc>, Option>), u64> { let alloc = match global_alloc { GlobalAlloc::Function { instance, .. } => { @@ -270,6 +271,18 @@ impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { } }; + if let Some(name) = name_hint { + let sym = self.tcx.symbol_name(name); + + let init = crate::consts::const_alloc_to_gcc_uncached(self, alloc); + let alloc = alloc.inner(); + let typ = self.val_ty(init).get_aligned(alloc.align.bytes()); + + let global = self.declare_global_with_linkage(sym.name, typ, GlobalKind::Exported); + global.global_set_initializer_rvalue(init); + return Ok((global.get_address(None), Some(name))); + } + let value = match alloc.inner().mutability { Mutability::Mut => { self.static_addr_of_mut(const_alloc_to_gcc(self, alloc), alloc.inner().align, None) @@ -304,7 +317,7 @@ impl<'gcc, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> { Scalar::Ptr(ptr, _size) => { let (prov, offset) = ptr.prov_and_relative_offset(); let alloc_id = prov.alloc_id(); - let base_addr = match self.alloc_to_backend(self.tcx.global_alloc(alloc_id)) { + let base_addr = match self.alloc_to_backend(self.tcx.global_alloc(alloc_id), None) { Ok((base_addr, _)) => base_addr, Err(base_addr) => { let val = 
base_addr.wrapping_add(offset.bytes()); diff --git a/compiler/rustc_codegen_llvm/src/asm.rs b/compiler/rustc_codegen_llvm/src/asm.rs index 838f18eccc337..8a82e44ac2a5b 100644 --- a/compiler/rustc_codegen_llvm/src/asm.rs +++ b/compiler/rustc_codegen_llvm/src/asm.rs @@ -159,12 +159,12 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { constraints.push(format!("{}", op_idx[&idx])); } } - InlineAsmOperandRef::Const { value, ty: _ } => match value { + InlineAsmOperandRef::Const { value, ty: _, instance: _ } => match value { ConstScalar::Int(_) => (), ConstScalar::Ptr(ptr, _) => { let (prov, _) = ptr.prov_and_relative_offset(); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); - let (value, _) = self.cx.alloc_to_backend(global_alloc).unwrap(); + let (value, _) = self.cx.alloc_to_backend(global_alloc, None).unwrap(); inputs.push(value); op_idx.insert(idx, constraints.len()); constraints.push("s".to_string()); @@ -212,7 +212,7 @@ impl<'ll, 'tcx> AsmBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { template_str.push_str(&format!("${{{}}}", op_idx[&operand_idx])); } } - InlineAsmOperandRef::Const { value, ty } => { + InlineAsmOperandRef::Const { value, ty, instance: _ } => { match value { ConstScalar::Int(int) => { // Const operands get injected directly into the template @@ -432,7 +432,7 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> { InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span } => { use rustc_codegen_ssa::back::symbol_export::escape_symbol_name; match operands[operand_idx] { - GlobalAsmOperandRef::Const { value, ty } => { + GlobalAsmOperandRef::Const { value, ty, instance } => { match value { ConstScalar::Int(int) => { // Const operands get injected directly into the @@ -449,7 +449,8 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> { ConstScalar::Ptr(ptr, _) => { let (prov, offset) = ptr.prov_and_relative_offset(); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); - let (llval, 
sym) = self.alloc_to_backend(global_alloc).unwrap(); + let (llval, sym) = + self.alloc_to_backend(global_alloc, instance).unwrap(); assert!(sym.is_some()); self.add_compiler_used_global(llval); diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs index 4fa37b2656654..466dd6d92e9d4 100644 --- a/compiler/rustc_codegen_llvm/src/common.rs +++ b/compiler/rustc_codegen_llvm/src/common.rs @@ -267,6 +267,7 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { fn alloc_to_backend( &self, global_alloc: GlobalAlloc<'tcx>, + name_hint: Option>, ) -> Result<(Self::Value, Option>), u64> { let alloc = match global_alloc { GlobalAlloc::Function { instance, .. } => { @@ -316,6 +317,22 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { let init = const_alloc_to_llvm(self, alloc.inner(), /*static*/ false); let alloc = alloc.inner(); + + if let Some(name) = name_hint { + let sym = self.tcx.symbol_name(name); + + // If a hint is provided, always use `static_addr_of_mut`, as `static_addr_of_impl` may + // deduplicate and provide one that doesn't have a desired name. 
+ let value = self.static_addr_of_mut(init, alloc.align, None); + if alloc.mutability.is_not() { + llvm::set_global_constant(value, true); + } + + llvm::set_value_name(value, sym.name.as_bytes()); + llvm::set_linkage(value, llvm::Linkage::InternalLinkage); + return Ok((value, Some(name))); + } + let value = match alloc.mutability { Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None), _ => self.static_addr_of_impl(init, alloc.align, None), @@ -348,7 +365,7 @@ impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> { let (prov, offset) = ptr.prov_and_relative_offset(); let global_alloc = self.tcx.global_alloc(prov.alloc_id()); let base_addr_space = global_alloc.address_space(self); - let base_addr = match self.alloc_to_backend(global_alloc) { + let base_addr = match self.alloc_to_backend(global_alloc, None) { Ok((base_addr, _)) => base_addr, Err(base_addr) => { let val = base_addr.wrapping_add(offset.bytes()); diff --git a/compiler/rustc_codegen_ssa/src/base.rs b/compiler/rustc_codegen_ssa/src/base.rs index 2a67b0e64b1ff..1f8c0de9f2d77 100644 --- a/compiler/rustc_codegen_ssa/src/base.rs +++ b/compiler/rustc_codegen_ssa/src/base.rs @@ -406,7 +406,8 @@ where .iter() .map(|(op, op_sp)| match *op { rustc_hir::InlineAsmOperand::Const { ref anon_const } => { - match cx.tcx().const_eval_poly(anon_const.def_id.to_def_id()) { + let def_id = anon_const.def_id.to_def_id(); + match cx.tcx().const_eval_poly(def_id) { Ok(const_value) => { let ty = cx.tcx().typeck_body(anon_const.body).node_type(anon_const.hir_id); @@ -420,6 +421,10 @@ where GlobalAsmOperandRef::Const { value: common::asm_const_ptr_clean(cx.tcx(), scalar), ty, + instance: Some(Instance::new_raw( + def_id, + ty::GenericArgs::identity_for_item(cx.tcx(), def_id), + )), } } Err(ErrorHandled::Reported { .. 
}) => { @@ -429,6 +434,7 @@ where GlobalAsmOperandRef::Const { value: Scalar::from_u32(0), ty: Ty::new_uint(cx.tcx(), UintTy::U32), + instance: None, } } Err(ErrorHandled::TooGeneric(_)) => { @@ -455,6 +461,7 @@ where cx, ), ty: Ty::new_fn_ptr(cx.tcx(), ty.fn_sig(cx.tcx())), + instance: None, } } rustc_hir::InlineAsmOperand::SymStatic { path: _, def_id } => { diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 6b4deaa5205f0..a387a1a9f3291 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -7,7 +7,9 @@ use rustc_data_structures::packed::Pu128; use rustc_hir::lang_items::LangItem; use rustc_lint_defs::builtin::TAIL_CALL_TRACK_CALLER; use rustc_middle::mir::interpret::{CTFE_ALLOC_SALT, Scalar}; -use rustc_middle::mir::{self, AssertKind, InlineAsmMacro, SwitchTargets, UnwindTerminateReason}; +use rustc_middle::mir::{ + self, AssertKind, Const, InlineAsmMacro, SwitchTargets, UnwindTerminateReason, +}; use rustc_middle::ty::layout::{HasTyCtxt, LayoutOf, ValidityRequirement}; use rustc_middle::ty::print::{with_no_trimmed_paths, with_no_visible_paths}; use rustc_middle::ty::{self, Instance, Ty, TypeVisitableExt}; @@ -1395,6 +1397,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { InlineAsmOperandRef::InOut { reg, late, in_value, out_place } } mir::InlineAsmOperand::Const { ref value } => { + let Const::Unevaluated(c, _) = &value.const_ else { + bug!("need unevaluated const to derive symbol name") + }; + let const_instance = Instance::new_raw(c.def, c.args); + let const_value = self.eval_mir_constant(value); let mir::ConstValue::Scalar(scalar) = const_value else { span_bug!( @@ -1406,6 +1413,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { InlineAsmOperandRef::Const { value: common::asm_const_ptr_clean(bx.tcx(), scalar), ty: value.ty(), + instance: Some(const_instance), } } mir::InlineAsmOperand::SymFn { 
ref value } => { @@ -1425,6 +1433,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { bx, ), ty: Ty::new_fn_ptr(bx.tcx(), const_.ty().fn_sig(bx.tcx())), + instance: None, } } else { span_bug!(span, "invalid type for asm sym (fn)"); diff --git a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs index 78849bcf9f3d4..37789443d9347 100644 --- a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs +++ b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs @@ -3,7 +3,7 @@ use rustc_hir::attrs::{InstructionSetAttr, Linkage}; use rustc_hir::def_id::LOCAL_CRATE; use rustc_middle::mir::interpret::{CTFE_ALLOC_SALT, Scalar}; use rustc_middle::mir::mono::{MonoItemData, Visibility}; -use rustc_middle::mir::{self, InlineAsmOperand, START_BLOCK}; +use rustc_middle::mir::{self, Const, InlineAsmOperand, START_BLOCK}; use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout}; use rustc_middle::ty::{Instance, Ty, TyCtxt, TypeVisitableExt}; use rustc_middle::{bug, span_bug, ty}; @@ -64,6 +64,11 @@ fn inline_to_global_operand<'a, 'tcx, Cx: LayoutOf<'tcx, LayoutOfResult = TyAndL ) -> GlobalAsmOperandRef<'tcx> { match op { InlineAsmOperand::Const { value } => { + let Const::Unevaluated(c, _) = &value.const_ else { + bug!("need unevaluated const to derive symbol name") + }; + let const_instance = Instance::new_raw(c.def, c.args); + let const_value = instance .instantiate_mir_and_normalize_erasing_regions( cx.tcx(), @@ -89,6 +94,7 @@ fn inline_to_global_operand<'a, 'tcx, Cx: LayoutOf<'tcx, LayoutOfResult = TyAndL GlobalAsmOperandRef::Const { value: common::asm_const_ptr_clean(cx.tcx(), scalar), ty: mono_type, + instance: Some(const_instance), } } InlineAsmOperand::SymFn { value } => { @@ -111,6 +117,7 @@ fn inline_to_global_operand<'a, 'tcx, Cx: LayoutOf<'tcx, LayoutOfResult = TyAndL cx, ), ty: Ty::new_fn_ptr(cx.tcx(), mono_type.fn_sig(cx.tcx())), + instance: None, } } InlineAsmOperand::SymStatic { def_id } => { diff --git 
a/compiler/rustc_codegen_ssa/src/traits/asm.rs b/compiler/rustc_codegen_ssa/src/traits/asm.rs index 48f42e3c970f2..b81066f6dd8b6 100644 --- a/compiler/rustc_codegen_ssa/src/traits/asm.rs +++ b/compiler/rustc_codegen_ssa/src/traits/asm.rs @@ -30,6 +30,10 @@ pub enum InlineAsmOperandRef<'tcx, B: BackendTypes + ?Sized> { value: Scalar, /// Type of the constant. This is needed to extract width and signedness. ty: Ty<'tcx>, + /// Instance to the const that produces this operand. + /// + /// This is used to be able to generate unique name for promoted statics. + instance: Option>, }, SymStatic { def_id: DefId, @@ -45,6 +49,10 @@ pub enum GlobalAsmOperandRef<'tcx> { value: Scalar, /// Type of the constant. This is needed to extract width and signedness. ty: Ty<'tcx>, + /// Instance to the const that produces this operand. + /// + /// This is used to be able to generate unique name for promoted statics. + instance: Option>, }, SymStatic { def_id: DefId, diff --git a/compiler/rustc_codegen_ssa/src/traits/consts.rs b/compiler/rustc_codegen_ssa/src/traits/consts.rs index 4064667e087bd..311156aef4709 100644 --- a/compiler/rustc_codegen_ssa/src/traits/consts.rs +++ b/compiler/rustc_codegen_ssa/src/traits/consts.rs @@ -43,9 +43,13 @@ pub trait ConstCodegenMethods<'tcx>: BackendTypes { /// /// If the `GlobalAlloc` should not be mapped to a global, but absolute address should be used, /// an integer is returned as `Err` instead. + /// + /// If the caller needs to guarantee a symbol name, it can provide a name hint. The name will be + /// used to generate a new symbol if there isn't one already (i.e. the case of fn/static). 
fn alloc_to_backend( &self, global_alloc: GlobalAlloc<'tcx>, + name_hint: Option>, ) -> Result<(Self::Value, Option>), u64>; fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: Self::Type) -> Self::Value; From f6a193de8f58292c28b0a702332cf56a4fff8cee Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Thu, 24 Oct 2024 06:22:18 +0100 Subject: [PATCH 10/10] Implement asm_const_ptr feature The backend now fully supports codegen of const pointers, remove the block inside typeck behind a new feature gate. Tests are also added. --- compiler/rustc_feature/src/unstable.rs | 2 + compiler/rustc_hir_typeck/src/errors.rs | 7 +++ compiler/rustc_hir_typeck/src/inline_asm.rs | 35 +++++++++++++- compiler/rustc_span/src/symbol.rs | 1 + tests/assembly-llvm/asm/global_asm.rs | 5 ++ tests/assembly-llvm/asm/x86-types.rs | 11 ++++- tests/ui/asm/const-refs-to-static.rs | 9 ++-- tests/ui/asm/const-refs-to-static.stderr | 22 --------- tests/ui/asm/invalid-const-operand.rs | 12 +++-- tests/ui/asm/invalid-const-operand.stderr | 46 ++++++------------- .../feature-gate-asm_const_ptr.rs | 22 +++++++++ .../feature-gate-asm_const_ptr.stderr | 33 +++++++++++++ 12 files changed, 140 insertions(+), 65 deletions(-) delete mode 100644 tests/ui/asm/const-refs-to-static.stderr create mode 100644 tests/ui/feature-gates/feature-gate-asm_const_ptr.rs create mode 100644 tests/ui/feature-gates/feature-gate-asm_const_ptr.stderr diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 3b15183be0b2d..d9911c3a57ca9 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -361,6 +361,8 @@ declare_features! ( (unstable, arbitrary_self_types_pointers, "1.83.0", Some(44874)), /// Target features on arm. (unstable, arm_target_feature, "1.27.0", Some(150246)), + /// Allows using `const` operands with pointer in inline assembly. 
+ (unstable, asm_const_ptr, "CURRENT_RUSTC_VERSION", Some(128464)), /// Enables experimental inline assembly support for additional architectures. (unstable, asm_experimental_arch, "1.58.0", Some(93335)), /// Enables experimental register support in inline assembly. diff --git a/compiler/rustc_hir_typeck/src/errors.rs b/compiler/rustc_hir_typeck/src/errors.rs index 52f6b126a7d0e..74282f6f8406d 100644 --- a/compiler/rustc_hir_typeck/src/errors.rs +++ b/compiler/rustc_hir_typeck/src/errors.rs @@ -19,6 +19,13 @@ use rustc_span::{Ident, Span, Symbol}; use crate::FnCtxt; +#[derive(Diagnostic)] +#[diag("using pointers in asm `const` operand is experimental")] +pub(crate) struct AsmConstPtrUnstable { + #[primary_span] + pub span: Span, +} + #[derive(Diagnostic)] +#[diag("base expression required after `..`", code = E0797)] pub(crate) struct BaseExpressionDoubleDot { diff --git a/compiler/rustc_hir_typeck/src/inline_asm.rs b/compiler/rustc_hir_typeck/src/inline_asm.rs index 7c1655f8201d7..90e3d470d061a 100644 --- a/compiler/rustc_hir_typeck/src/inline_asm.rs +++ b/compiler/rustc_hir_typeck/src/inline_asm.rs @@ -14,7 +14,7 @@ use rustc_target::asm::{ use rustc_trait_selection::infer::InferCtxtExt; use crate::FnCtxt; -use crate::errors::RegisterTypeUnstable; +use crate::errors::{AsmConstPtrUnstable, RegisterTypeUnstable}; pub(crate) struct InlineAsmCtxt<'a, 'tcx> { target_features: &'tcx FxIndexSet<Symbol>, @@ -510,7 +510,36 @@ impl<'a, 'tcx> InlineAsmCtxt<'a, 'tcx> { match ty.kind() { ty::Error(_) => {} _ if ty.is_integral() => {} + ty::FnPtr(..)
=> { + if !self.tcx().features().asm_const_ptr() { + self.tcx() + .sess + .create_feature_err( + AsmConstPtrUnstable { span: op_sp }, + sym::asm_const_ptr, + ) + .emit(); + } + } + ty::RawPtr(pointee, _) | ty::Ref(_, pointee, _) + if self.is_thin_ptr_ty(op_sp, *pointee) => + { + if !self.tcx().features().asm_const_ptr() { + self.tcx() + .sess + .create_feature_err( + AsmConstPtrUnstable { span: op_sp }, + sym::asm_const_ptr, + ) + .emit(); + } + } _ => { + let const_possible_ty = if !self.tcx().features().asm_const_ptr() { + "integer" + } else { + "integer or thin pointer" + }; self.fcx .dcx() .struct_span_err(op_sp, "invalid type for `const` operand") @@ -518,7 +547,9 @@ impl<'a, 'tcx> InlineAsmCtxt<'a, 'tcx> { self.tcx().def_span(anon_const.def_id), format!("is {} `{}`", ty.kind().article(), ty), ) - .with_help("`const` operands must be of an integer type") + .with_help(format!( + "`const` operands must be of an {const_possible_ty} type" + )) .emit(); } } diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 5f3b5d787c0f2..1f070f2e00194 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -418,6 +418,7 @@ symbols! 
{ asm, asm_cfg, asm_const, + asm_const_ptr, asm_experimental_arch, asm_experimental_reg, asm_goto, diff --git a/tests/assembly-llvm/asm/global_asm.rs b/tests/assembly-llvm/asm/global_asm.rs index 8a4bf98c7450b..f7bf13281dbde 100644 --- a/tests/assembly-llvm/asm/global_asm.rs +++ b/tests/assembly-llvm/asm/global_asm.rs @@ -5,6 +5,7 @@ //@ compile-flags: -C symbol-mangling-version=v0 #![crate_type = "rlib"] +#![feature(asm_const_ptr)] use std::arch::global_asm; @@ -26,6 +27,10 @@ global_asm!("call {}", sym my_func); global_asm!("lea rax, [rip + {}]", sym MY_STATIC); // CHECK: call _RNvC[[CRATE_IDENT:[a-zA-Z0-9]{12}]]_10global_asm6foobar global_asm!("call {}", sym foobar); +// CHECK: lea rax, [rip + _RNKNaC[[CRATE_IDENT]]_10global_asms4_00B3_] +global_asm!("lea rax, [rip + {}]", const &1); +// CHECK: lea rax, [rip + _RNKNaC[[CRATE_IDENT]]_10global_asms5_00B3_+4] +global_asm!("lea rax, [rip + {}]", const &[1; 2][1]); // CHECK: _RNvC[[CRATE_IDENT]]_10global_asm6foobar: fn foobar() { loop {} diff --git a/tests/assembly-llvm/asm/x86-types.rs b/tests/assembly-llvm/asm/x86-types.rs index 9fe7ea00bd939..f703693f21c50 100644 --- a/tests/assembly-llvm/asm/x86-types.rs +++ b/tests/assembly-llvm/asm/x86-types.rs @@ -9,7 +9,7 @@ //@ compile-flags: -C target-feature=+avx512bw //@ compile-flags: -Zmerge-functions=disabled -#![feature(no_core, repr_simd, f16, f128)] +#![feature(no_core, repr_simd, f16, f128, asm_const_ptr)] #![crate_type = "rlib"] #![no_core] #![allow(asm_sub_register, non_camel_case_types)] @@ -101,6 +101,15 @@ pub unsafe fn sym_static() { asm!("mov al, byte ptr [{}]", sym extern_static); } +// CHECK-LABEL: const_ptr: +// CHECK: #APP +// CHECK: mov al, byte ptr [{{.*}}anon{{.*}}] +// CHECK: #NO_APP +#[no_mangle] +pub unsafe fn const_ptr() { + asm!("mov al, byte ptr [{}]", const &1u8); +} + macro_rules! 
check { ($func:ident $ty:ident $class:ident $mov:literal) => { #[no_mangle] diff --git a/tests/ui/asm/const-refs-to-static.rs b/tests/ui/asm/const-refs-to-static.rs index ce2c5b3246ec8..8058d70550aba 100644 --- a/tests/ui/asm/const-refs-to-static.rs +++ b/tests/ui/asm/const-refs-to-static.rs @@ -1,19 +1,20 @@ //@ needs-asm-support //@ ignore-nvptx64 //@ ignore-spirv +//@ build-pass + +#![feature(asm_const_ptr)] use std::arch::{asm, global_asm}; use std::ptr::addr_of; static FOO: u8 = 42; -global_asm!("{}", const addr_of!(FOO)); -//~^ ERROR invalid type for `const` operand +global_asm!("/* {} */", const addr_of!(FOO)); #[no_mangle] fn inline() { - unsafe { asm!("{}", const addr_of!(FOO)) }; - //~^ ERROR invalid type for `const` operand + unsafe { asm!("/* {} */", const addr_of!(FOO)) }; } fn main() {} diff --git a/tests/ui/asm/const-refs-to-static.stderr b/tests/ui/asm/const-refs-to-static.stderr deleted file mode 100644 index 10e1ca5bd6068..0000000000000 --- a/tests/ui/asm/const-refs-to-static.stderr +++ /dev/null @@ -1,22 +0,0 @@ -error: invalid type for `const` operand - --> $DIR/const-refs-to-static.rs:10:19 - | -LL | global_asm!("{}", const addr_of!(FOO)); - | ^^^^^^------------- - | | - | is a `*const u8` - | - = help: `const` operands must be of an integer type - -error: invalid type for `const` operand - --> $DIR/const-refs-to-static.rs:15:25 - | -LL | unsafe { asm!("{}", const addr_of!(FOO)) }; - | ^^^^^^------------- - | | - | is a `*const u8` - | - = help: `const` operands must be of an integer type - -error: aborting due to 2 previous errors - diff --git a/tests/ui/asm/invalid-const-operand.rs b/tests/ui/asm/invalid-const-operand.rs index 5c7b1a6b9654f..ab25b8e3db6a7 100644 --- a/tests/ui/asm/invalid-const-operand.rs +++ b/tests/ui/asm/invalid-const-operand.rs @@ -3,6 +3,8 @@ //@ ignore-spirv //@ reference: asm.operand-type.supported-operands.const +#![feature(asm_const_ptr)] + use std::arch::{asm, global_asm}; // Const operands must be integers and must 
be constants. @@ -13,11 +15,10 @@ global_asm!("{}", const 0i128); global_asm!("{}", const 0f32); //~^ ERROR invalid type for `const` operand global_asm!("{}", const 0 as *mut u8); -//~^ ERROR invalid type for `const` operand fn test1() { unsafe { - // Const operands must be integers and must be constants. + // Const operands must be integers or thin pointers asm!("{}", const 0); asm!("{}", const 0i32); @@ -25,9 +26,14 @@ fn test1() { asm!("{}", const 0f32); //~^ ERROR invalid type for `const` operand asm!("{}", const 0 as *mut u8); - //~^ ERROR invalid type for `const` operand asm!("{}", const &0); + asm!("{}", const b"Foo".as_slice()); //~^ ERROR invalid type for `const` operand + + asm!("{}", const test1 as fn()); + asm!("{}", const test1); + asm!("{}", const (|| {}) as fn()); + asm!("{}", const || {}); } } diff --git a/tests/ui/asm/invalid-const-operand.stderr b/tests/ui/asm/invalid-const-operand.stderr index 3a3129ff3f6be..dc6378ff571e4 100644 --- a/tests/ui/asm/invalid-const-operand.stderr +++ b/tests/ui/asm/invalid-const-operand.stderr @@ -1,5 +1,5 @@ error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/invalid-const-operand.rs:45:26 + --> $DIR/invalid-const-operand.rs:51:26 | LL | asm!("{}", const x); | ^ non-constant value @@ -11,7 +11,7 @@ LL + const x: /* Type */ = 0; | error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/invalid-const-operand.rs:48:36 + --> $DIR/invalid-const-operand.rs:54:36 | LL | asm!("{}", const const_foo(x)); | ^ non-constant value @@ -23,7 +23,7 @@ LL + const x: /* Type */ = 0; | error[E0435]: attempt to use a non-constant value in a constant - --> $DIR/invalid-const-operand.rs:51:36 + --> $DIR/invalid-const-operand.rs:57:36 | LL | asm!("{}", const const_bar(x)); | ^ non-constant value @@ -35,55 +35,35 @@ LL + const x: /* Type */ = 0; | error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:13:19 + --> $DIR/invalid-const-operand.rs:15:19 | LL | global_asm!("{}", const 
0f32); | ^^^^^^---- | | | is an `f32` | - = help: `const` operands must be of an integer type - -error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:15:19 - | -LL | global_asm!("{}", const 0 as *mut u8); - | ^^^^^^------------ - | | - | is a `*mut u8` - | - = help: `const` operands must be of an integer type + = help: `const` operands must be of an integer or thin pointer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:25:20 + --> $DIR/invalid-const-operand.rs:26:20 | LL | asm!("{}", const 0f32); | ^^^^^^---- | | | is an `f32` | - = help: `const` operands must be of an integer type - -error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:27:20 - | -LL | asm!("{}", const 0 as *mut u8); - | ^^^^^^------------ - | | - | is a `*mut u8` - | - = help: `const` operands must be of an integer type + = help: `const` operands must be of an integer or thin pointer type error: invalid type for `const` operand - --> $DIR/invalid-const-operand.rs:29:20 + --> $DIR/invalid-const-operand.rs:30:20 | -LL | asm!("{}", const &0); - | ^^^^^^-- +LL | asm!("{}", const b"Foo".as_slice()); + | ^^^^^^----------------- | | - | is a `&i32` + | is a `&[u8]` | - = help: `const` operands must be of an integer type + = help: `const` operands must be of an integer or thin pointer type -error: aborting due to 8 previous errors +error: aborting due to 6 previous errors For more information about this error, try `rustc --explain E0435`. 
diff --git a/tests/ui/feature-gates/feature-gate-asm_const_ptr.rs b/tests/ui/feature-gates/feature-gate-asm_const_ptr.rs new file mode 100644 index 0000000000000..cdcb5995a0f08 --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-asm_const_ptr.rs @@ -0,0 +1,22 @@ +//@ only-x86_64 + +use std::arch::{asm, global_asm, naked_asm}; + +global_asm!("/* {} */", const &0); +//~^ ERROR using pointers in asm `const` operand is experimental + +#[unsafe(naked)] +extern "C" fn naked() { + unsafe { + naked_asm!("ret /* {} */", const &0); + //~^ ERROR using pointers in asm `const` operand is experimental + } +} + +fn main() { + naked(); + unsafe { + asm!("/* {} */", const &0); + //~^ ERROR using pointers in asm `const` operand is experimental + } +} diff --git a/tests/ui/feature-gates/feature-gate-asm_const_ptr.stderr b/tests/ui/feature-gates/feature-gate-asm_const_ptr.stderr new file mode 100644 index 0000000000000..a804d8fe44be5 --- /dev/null +++ b/tests/ui/feature-gates/feature-gate-asm_const_ptr.stderr @@ -0,0 +1,33 @@ +error[E0658]: using pointers in asm `const` operand is experimental + --> $DIR/feature-gate-asm_const_ptr.rs:5:25 + | +LL | global_asm!("/* {} */", const &0); + | ^^^^^^^^ + | + = note: see issue #128464 for more information + = help: add `#![feature(asm_const_ptr)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: using pointers in asm `const` operand is experimental + --> $DIR/feature-gate-asm_const_ptr.rs:11:36 + | +LL | naked_asm!("ret /* {} */", const &0); + | ^^^^^^^^ + | + = note: see issue #128464 for more information + = help: add `#![feature(asm_const_ptr)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error[E0658]: using pointers in asm `const` operand is experimental + --> $DIR/feature-gate-asm_const_ptr.rs:19:26 + | +LL | asm!("/* {} */", const &0); + | ^^^^^^^^ + | + = 
note: see issue #128464 for more information + = help: add `#![feature(asm_const_ptr)]` to the crate attributes to enable + = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0658`.