
Commit 92c0cc4

Implement asm_const_ptr for global_asm and naked_asm
1 parent a393bbf commit 92c0cc4
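
For context, a rough usage sketch of what this commit enables (not taken from the commit's tests; the `asm_const_ptr` gate name comes from the commit title, and the exact surface syntax/stability on a given nightly is an assumption): `const` operands of pointer type can now be passed to `global_asm!` and `naked_asm!`, and the backends lower them to a symbol reference, with a `+offset` suffix when needed, instead of rejecting them as non-integer constants.

#![feature(asm_const_ptr)] // gate name per the commit title; hypothetical spelling
use std::arch::global_asm;

static TABLE: [u32; 4] = [10, 20, 30, 40];

// x86-64/GAS-flavored sketch: emit a pointer-sized word holding TABLE's address.
// The `{}` placeholder is replaced with TABLE's (mangled) symbol name at codegen time.
global_asm!(
    "table_addr:",
    "    .quad {}",
    const &raw const TABLE,
);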


13 files changed: +337 -95 lines changed


Diff for: compiler/rustc_codegen_gcc/src/asm.rs

+95-29
@@ -1,15 +1,15 @@
 use std::borrow::Cow;
 
-use gccjit::{LValue, RValue, ToRValue, Type};
+use gccjit::{GlobalKind, LValue, RValue, ToRValue, Type};
 use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_codegen_ssa::mir::operand::OperandValue;
 use rustc_codegen_ssa::mir::place::PlaceRef;
 use rustc_codegen_ssa::traits::{
     AsmBuilderMethods, AsmCodegenMethods, BaseTypeCodegenMethods, BuilderMethods,
     GlobalAsmOperandRef, InlineAsmOperandRef,
 };
-use rustc_middle::bug;
 use rustc_middle::ty::Instance;
+use rustc_middle::{bug, mir};
 use rustc_span::Span;
 use rustc_target::asm::*;
 
@@ -827,6 +827,98 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
         let att_dialect = matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64)
             && options.contains(InlineAsmOptions::ATT_SYNTAX);
 
+        // Convert all operands to string interpolations
+        let converted_operands = operands
+            .iter()
+            .enumerate()
+            .map(|(operand_idx, operand)| {
+                match *operand {
+                    GlobalAsmOperandRef::Interpolate { ref string } => {
+                        // Const operands get injected directly into the
+                        // template. Note that we don't need to escape $
+                        // here unlike normal inline assembly.
+                        string.to_owned()
+                    }
+                    GlobalAsmOperandRef::ConstPointer { value, instance } => {
+                        let (prov, offset) = value.into_parts();
+                        let global_alloc = self.tcx.global_alloc(prov.alloc_id());
+                        let symbol = 'sym: {
+                            let alloc = match global_alloc {
+                                mir::interpret::GlobalAlloc::Function { instance } => {
+                                    let function = get_fn(self, instance);
+                                    self.add_used_function(function);
+                                    // TODO(@Amanieu): Additional mangling is needed on
+                                    // some targets to add a leading underscore (Mach-O)
+                                    // or byte count suffixes (x86 Windows).
+                                    break 'sym self.tcx.symbol_name(instance).name.to_owned();
+                                }
+                                mir::interpret::GlobalAlloc::VTable(ty, dyn_ty) => self
+                                    .tcx
+                                    .global_alloc(self.tcx.vtable_allocation((
+                                        ty,
+                                        dyn_ty.principal().map(|principal| {
+                                            self.tcx
+                                                .instantiate_bound_regions_with_erased(principal)
+                                        }),
+                                    )))
+                                    .unwrap_memory(),
+                                mir::interpret::GlobalAlloc::Static(def_id) => {
+                                    // TODO(antoyo): set the global variable as used.
+                                    // TODO(@Amanieu): Additional mangling is needed on
+                                    // some targets to add a leading underscore (Mach-O).
+                                    let instance = Instance::mono(self.tcx, def_id);
+                                    break 'sym self.tcx.symbol_name(instance).name.to_owned();
+                                }
+                                mir::interpret::GlobalAlloc::Memory(alloc) => alloc,
+                            };
+
+                            // For ZSTs directly codegen an aligned pointer.
+                            if alloc.inner().len() == 0 {
+                                assert_eq!(offset.bytes(), 0);
+                                return format!("{}", alloc.inner().align.bytes());
+                            }
+
+                            let asm_name = self.tcx.symbol_name(instance);
+                            let sym_name = format!("{asm_name}.{operand_idx}");
+
+                            let init = crate::consts::const_alloc_to_gcc(self, alloc);
+                            let alloc = alloc.inner();
+                            let typ = self.val_ty(init).get_aligned(alloc.align.bytes());
+
+                            let global = self.declare_global_with_linkage(
+                                &sym_name,
+                                typ,
+                                GlobalKind::Exported,
+                            );
+                            global.global_set_initializer_rvalue(init);
+                            // TODO(nbdd0121): set unnamed address.
+                            // TODO(nbdd0121): set the global variable as used.
+
+                            sym_name
+                        };
+
+                        let offset = offset.bytes();
+                        if offset != 0 { format!("{symbol}+{offset}") } else { symbol }
+                    }
+                    GlobalAsmOperandRef::SymFn { instance } => {
+                        let function = get_fn(self, instance);
+                        self.add_used_function(function);
+                        // TODO(@Amanieu): Additional mangling is needed on
+                        // some targets to add a leading underscore (Mach-O)
+                        // or byte count suffixes (x86 Windows).
+                        self.tcx.symbol_name(instance).name.to_owned()
+                    }
+                    GlobalAsmOperandRef::SymStatic { def_id } => {
+                        // TODO(antoyo): set the global variable as used.
+                        // TODO(@Amanieu): Additional mangling is needed on
+                        // some targets to add a leading underscore (Mach-O).
+                        let instance = Instance::mono(self.tcx, def_id);
+                        self.tcx.symbol_name(instance).name.to_owned()
+                    }
+                }
+            })
+            .collect::<Vec<_>>();
+
         // Build the template string
         let mut template_str = ".pushsection .text\n".to_owned();
         if att_dialect {
@@ -850,33 +942,7 @@ impl<'gcc, 'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
                     }
                 }
                 InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
-                    match operands[operand_idx] {
-                        GlobalAsmOperandRef::Interpolate { ref string } => {
-                            // Const operands get injected directly into the
-                            // template. Note that we don't need to escape %
-                            // here unlike normal inline assembly.
-                            template_str.push_str(string);
-                        }
-
-                        GlobalAsmOperandRef::SymFn { instance } => {
-                            let function = get_fn(self, instance);
-                            self.add_used_function(function);
-                            // TODO(@Amanieu): Additional mangling is needed on
-                            // some targets to add a leading underscore (Mach-O)
-                            // or byte count suffixes (x86 Windows).
-                            let name = self.tcx.symbol_name(instance).name;
-                            template_str.push_str(name);
-                        }
-
-                        GlobalAsmOperandRef::SymStatic { def_id } => {
-                            // TODO(antoyo): set the global variable as used.
-                            // TODO(@Amanieu): Additional mangling is needed on
-                            // some targets to add a leading underscore (Mach-O).
-                            let instance = Instance::mono(self.tcx, def_id);
-                            let name = self.tcx.symbol_name(instance).name;
-                            template_str.push_str(name);
-                        }
-                    }
+                    template_str.push_str(&converted_operands[operand_idx]);
                 }
             }
         }
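
The GCC backend (and the LLVM backend below) now share the same shape: every operand is rendered to its string form once, up front, and the template loop then splices those strings in by placeholder index. A minimal, self-contained sketch of that pattern; all names here (Piece, Operand, expand) are illustrative stand-ins, not rustc types:

// Simplified model of "convert operands first, then expand placeholders".
enum Piece {
    Text(String),
    Placeholder(usize), // operand index
}

enum Operand {
    Interpolate(String),           // e.g. an integer const rendered to text
    Symbol(String),                // e.g. a sym fn/static operand
    SymbolWithOffset(String, u64), // e.g. a const pointer into an allocation
}

fn expand(pieces: &[Piece], operands: &[Operand]) -> String {
    // Convert all operands to string interpolations first.
    let converted: Vec<String> = operands
        .iter()
        .map(|op| match op {
            Operand::Interpolate(s) => s.clone(),
            Operand::Symbol(name) => name.clone(),
            Operand::SymbolWithOffset(name, 0) => name.clone(),
            Operand::SymbolWithOffset(name, off) => format!("{name}+{off}"),
        })
        .collect();

    // Then splice them into the template by index.
    let mut out = String::new();
    for piece in pieces {
        match piece {
            Piece::Text(s) => out.push_str(s),
            Piece::Placeholder(idx) => out.push_str(&converted[*idx]),
        }
    }
    out
}

fn main() {
    let pieces = [Piece::Text(".quad ".into()), Piece::Placeholder(0), Piece::Text("\n".into())];
    let ops = [Operand::SymbolWithOffset("my_static".into(), 8)];
    assert_eq!(expand(&pieces, &ops), ".quad my_static+8\n");
}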

Diff for: compiler/rustc_codegen_llvm/src/asm.rs

+107-32
@@ -7,7 +7,7 @@ use rustc_codegen_ssa::traits::*;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_middle::ty::Instance;
 use rustc_middle::ty::layout::TyAndLayout;
-use rustc_middle::{bug, span_bug};
+use rustc_middle::{bug, mir, span_bug};
 use rustc_span::{Pos, Span, Symbol, sym};
 use rustc_target::asm::*;
 use smallvec::SmallVec;
@@ -396,6 +396,111 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
         let intel_syntax = matches!(asm_arch, InlineAsmArch::X86 | InlineAsmArch::X86_64)
             && !options.contains(InlineAsmOptions::ATT_SYNTAX);
 
+        // Convert all operands to string interpolations
+        let converted_operands = operands
+            .iter()
+            .enumerate()
+            .map(|(operand_idx, operand)| {
+                match *operand {
+                    GlobalAsmOperandRef::Interpolate { ref string } => {
+                        // Const operands get injected directly into the
+                        // template. Note that we don't need to escape $
+                        // here unlike normal inline assembly.
+                        string.to_owned()
+                    }
+                    GlobalAsmOperandRef::ConstPointer { value, instance } => {
+                        let (prov, offset) = value.into_parts();
+                        let global_alloc = self.tcx.global_alloc(prov.alloc_id());
+                        let llval = 'llval: {
+                            let alloc = match global_alloc {
+                                mir::interpret::GlobalAlloc::Function { instance } => {
+                                    break 'llval self.get_fn(instance);
+                                }
+                                mir::interpret::GlobalAlloc::VTable(ty, dyn_ty) => self
+                                    .tcx
+                                    .global_alloc(self.tcx.vtable_allocation((
+                                        ty,
+                                        dyn_ty.principal().map(|principal| {
+                                            self.tcx
+                                                .instantiate_bound_regions_with_erased(principal)
+                                        }),
+                                    )))
+                                    .unwrap_memory(),
+                                mir::interpret::GlobalAlloc::Static(def_id) => {
+                                    break 'llval self
+                                        .renamed_statics
+                                        .borrow()
+                                        .get(&def_id)
+                                        .copied()
+                                        .unwrap_or_else(|| self.get_static(def_id));
+                                }
+                                mir::interpret::GlobalAlloc::Memory(alloc) => alloc,
+                            };
+
+                            // For ZSTs directly codegen an aligned pointer.
+                            if alloc.inner().len() == 0 {
+                                assert_eq!(offset.bytes(), 0);
+                                return format!("{}", alloc.inner().align.bytes());
+                            }
+
+                            let asm_name = self.tcx.symbol_name(instance);
+                            let sym_name = format!("{asm_name}.{operand_idx}");
+
+                            let init = crate::consts::const_alloc_to_llvm(
+                                self, alloc, /*static*/ false,
+                            );
+                            let alloc = alloc.inner();
+                            let g = self.static_addr_of_mut(init, alloc.align, None);
+                            if alloc.mutability.is_not() {
+                                // NB: we can't use `static_addr_of_impl` here to avoid sharing
+                                // the global, as we need to set name and linkage.
+                                unsafe { llvm::LLVMSetGlobalConstant(g, llvm::True) };
+                            }
+
+                            llvm::set_value_name(g, sym_name.as_bytes());
+
+                            // `static_addr_of_mut` gives us a private global which can't be
+                            // used by global asm. Update it to a hidden internal global instead.
+                            llvm::set_linkage(g, llvm::Linkage::InternalLinkage);
+                            llvm::set_visibility(g, llvm::Visibility::Hidden);
+                            g
+                        };
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+
+                        let offset = offset.bytes();
+                        if offset != 0 { format!("{symbol}+{offset}") } else { symbol }
+                    }
+                    GlobalAsmOperandRef::SymFn { instance } => {
+                        let llval = self.get_fn(instance);
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+                        symbol
+                    }
+                    GlobalAsmOperandRef::SymStatic { def_id } => {
+                        let llval = self
+                            .renamed_statics
+                            .borrow()
+                            .get(&def_id)
+                            .copied()
+                            .unwrap_or_else(|| self.get_static(def_id));
+                        self.add_compiler_used_global(llval);
+                        let symbol = llvm::build_string(|s| unsafe {
+                            llvm::LLVMRustGetMangledName(llval, s);
+                        })
+                        .expect("symbol is not valid UTF-8");
+                        symbol
+                    }
+                }
+            })
+            .collect::<Vec<_>>();
+
         // Build the template string
         let mut template_str = String::new();
         if intel_syntax {
@@ -405,37 +510,7 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
             match *piece {
                 InlineAsmTemplatePiece::String(ref s) => template_str.push_str(s),
                 InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
-                    match operands[operand_idx] {
-                        GlobalAsmOperandRef::Interpolate { ref string } => {
-                            // Const operands get injected directly into the
-                            // template. Note that we don't need to escape $
-                            // here unlike normal inline assembly.
-                            template_str.push_str(string);
-                        }
-                        GlobalAsmOperandRef::SymFn { instance } => {
-                            let llval = self.get_fn(instance);
-                            self.add_compiler_used_global(llval);
-                            let symbol = llvm::build_string(|s| unsafe {
-                                llvm::LLVMRustGetMangledName(llval, s);
-                            })
-                            .expect("symbol is not valid UTF-8");
-                            template_str.push_str(&symbol);
-                        }
-                        GlobalAsmOperandRef::SymStatic { def_id } => {
-                            let llval = self
-                                .renamed_statics
-                                .borrow()
-                                .get(&def_id)
-                                .copied()
-                                .unwrap_or_else(|| self.get_static(def_id));
-                            self.add_compiler_used_global(llval);
-                            let symbol = llvm::build_string(|s| unsafe {
-                                llvm::LLVMRustGetMangledName(llval, s);
-                            })
-                            .expect("symbol is not valid UTF-8");
-                            template_str.push_str(&symbol);
-                        }
-                    }
+                    template_str.push_str(&converted_operands[operand_idx])
                 }
             }
         }
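
Once the LLVM and GCC paths above have found something to name, they render a ConstPointer the same way: a function or static reuses its own symbol, an anonymous Memory allocation gets a fresh per-operand global named `{asm_name}.{operand_idx}`, a zero-sized allocation becomes a bare integer equal to its alignment, and a non-zero offset is appended as `+N`. A plain-Rust model of just that rendering step (illustrative only, not rustc code):

// Model of the final text produced for a ConstPointer operand.
fn render_const_pointer(symbol: Option<String>, len: u64, align: u64, offset: u64) -> String {
    // Zero-sized allocation: there are no bytes to point at, so any well-aligned
    // address works; both backends emit the alignment as a bare integer.
    if len == 0 {
        assert_eq!(offset, 0);
        return align.to_string();
    }
    // `symbol` is Some for functions and statics, which already have a name; None
    // models an anonymous allocation, for which the backends mint a global named
    // "<asm-item-symbol>.<operand-index>" (hypothetical name shown here).
    let symbol = symbol.unwrap_or_else(|| "anon_asm_alloc.0".to_string());
    if offset != 0 { format!("{symbol}+{offset}") } else { symbol }
}

fn main() {
    // ZST: rendered as the alignment itself (here, 4).
    assert_eq!(render_const_pointer(None, 0, 4, 0), "4");
    // Pointer 8 bytes into a named static.
    assert_eq!(render_const_pointer(Some("MY_STATIC".into()), 32, 8, 8), "MY_STATIC+8");
}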

Diff for: compiler/rustc_codegen_ssa/src/common.rs

+1-1
@@ -166,7 +166,7 @@ pub fn asm_const_to_str<'tcx>(
     };
     let value = scalar.assert_scalar_int().to_bits(ty_and_layout.size);
     match ty_and_layout.ty.kind() {
-        ty::Uint(_) => value.to_string(),
+        ty::Uint(_) | ty::RawPtr(..) | ty::Ref(..) => value.to_string(),
         ty::Int(int_ty) => match int_ty.normalize(tcx.sess.target.pointer_width) {
            ty::IntTy::I8 => (value as i8).to_string(),
            ty::IntTy::I16 => (value as i16).to_string(),
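
A hedged illustration of what the widened arm permits (nightly-only; the `asm_const_ptr` gate name is taken from the commit title and the exact surface syntax is an assumption): a pointer-typed `const` whose value is a bare address, i.e. a `Scalar::Int` with no provenance, still takes the string-interpolation path and is now formatted exactly like an unsigned integer.

#![feature(asm_const_ptr)] // hypothetical spelling, per the commit title
use std::arch::global_asm;

// 0xDEAD_BEEF carries no provenance, so this stays on the Interpolate path and the
// placeholder expands to the decimal address: ".set MMIO_BASE, 3735928559".
global_asm!(
    ".set MMIO_BASE, {}",
    const 0xDEAD_BEEF_usize as *const u8,
);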

Diff for: compiler/rustc_codegen_ssa/src/mir/naked_asm.rs

+23-9
@@ -2,7 +2,7 @@ use rustc_abi::{BackendRepr, Float, Integer, Primitive, RegKind};
 use rustc_attr_parsing::InstructionSetAttr;
 use rustc_hir::def_id::DefId;
 use rustc_middle::mir::mono::{Linkage, MonoItem, MonoItemData, Visibility};
-use rustc_middle::mir::{Body, InlineAsmOperand};
+use rustc_middle::mir::{self, Body, InlineAsmOperand};
 use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt, HasTypingEnv, LayoutOf};
 use rustc_middle::ty::{Instance, Ty, TyCtxt};
 use rustc_middle::{bug, span_bug, ty};
@@ -69,14 +69,28 @@ fn inline_to_global_operand<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
                 ty::EarlyBinder::bind(value.ty()),
             );
 
-            let string = common::asm_const_to_str(
-                cx.tcx(),
-                value.span,
-                const_value,
-                cx.layout_of(mono_type),
-            );
-
-            GlobalAsmOperandRef::Interpolate { string }
+            let mir::ConstValue::Scalar(scalar) = const_value else {
+                span_bug!(
+                    value.span,
+                    "expected Scalar for promoted asm const, but got {:#?}",
+                    const_value
+                )
+            };
+            match scalar {
+                mir::interpret::Scalar::Int(_) => {
+                    let string = common::asm_const_to_str(
+                        cx.tcx(),
+                        value.span,
+                        const_value,
+                        cx.layout_of(mono_type),
+                    );
+                    GlobalAsmOperandRef::Interpolate { string }
+                }
+                mir::interpret::Scalar::Ptr(value, _) => GlobalAsmOperandRef::ConstPointer {
+                    value,
+                    instance: Instance::mono(cx.tcx(), instance.def_id()),
+                },
+            }
         }
         InlineAsmOperand::SymFn { value } => {
             let mono_type = instance.instantiate_mir_and_normalize_erasing_regions(
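
A plain-Rust model of the new dispatch above (illustrative stand-ins, not rustc types): a naked-asm `const` whose evaluated scalar is a plain integer keeps the existing string path, while a pointer carrying provenance is forwarded to the backend as a ConstPointer operand so it can become a relocatable symbol reference.

// Toy stand-ins for the interpreter scalar and the global-asm operand.
enum Scalar {
    Int(u128),
    Ptr { alloc_id: u64, offset: u64 },
}

enum GlobalOperand {
    Interpolate(String),
    ConstPointer { alloc_id: u64, offset: u64 },
}

fn lower_asm_const(scalar: Scalar) -> GlobalOperand {
    match scalar {
        // Integers (and pointers without provenance) can be pasted into the template.
        Scalar::Int(v) => GlobalOperand::Interpolate(v.to_string()),
        // Pointers with provenance need a symbol (+ offset) emitted by the backend.
        Scalar::Ptr { alloc_id, offset } => GlobalOperand::ConstPointer { alloc_id, offset },
    }
}

fn main() {
    match lower_asm_const(Scalar::Ptr { alloc_id: 1, offset: 16 }) {
        GlobalOperand::ConstPointer { alloc_id, offset } => {
            println!("backend will emit a symbol for alloc {alloc_id} and append +{offset}");
        }
        GlobalOperand::Interpolate(s) => println!("spliced directly into the template: {s}"),
    }
}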
