use std::collections::hash_map::Entry;
use std::fmt::Write;

use rustc_ast::*;
use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap};
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_session::parse::feature_err;
use rustc_span::{Span, sym};
use rustc_target::asm;

use super::LoweringContext;
use super::errors::{
    AbiSpecifiedMultipleTimes, AttSyntaxOnlyX86, ClobberAbiNotSupported,
    InlineAsmUnsupportedTarget, InvalidAbiClobberAbi, InvalidAsmTemplateModifierConst,
    InvalidAsmTemplateModifierLabel, InvalidAsmTemplateModifierRegClass,
    InvalidAsmTemplateModifierRegClassSub, InvalidAsmTemplateModifierSym, InvalidRegister,
    InvalidRegisterClass, RegisterClassOnlyClobber, RegisterClassOnlyClobberStable,
    RegisterConflict,
};
use crate::{
    AllowReturnTypeNotation, ImplTraitContext, ImplTraitPosition, ParamMode,
    ResolverAstLoweringExt, fluent_generated as fluent,
};

impl<'a, 'hir> LoweringContext<'a, 'hir> {
    pub(crate) fn lower_inline_asm(
        &mut self,
        sp: Span,
        asm: &InlineAsm,
    ) -> &'hir hir::InlineAsm<'hir> {
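        // Rustdoc needs to support `asm!` from foreign architectures: don't
        // try to lower the register constraints in that case.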
        let asm_arch =
            if self.tcx.sess.opts.actually_rustdoc { None } else { self.tcx.sess.asm_arch };
        if asm_arch.is_none() && !self.tcx.sess.opts.actually_rustdoc {
            self.dcx().emit_err(InlineAsmUnsupportedTarget { span: sp });
        }
        if let Some(asm_arch) = asm_arch {
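            // Inline assembly is currently only stable on these architectures;
            // all others are gated behind `asm_experimental_arch`.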
            let is_stable = matches!(
                asm_arch,
                asm::InlineAsmArch::X86
                    | asm::InlineAsmArch::X86_64
                    | asm::InlineAsmArch::Arm
                    | asm::InlineAsmArch::AArch64
                    | asm::InlineAsmArch::Arm64EC
                    | asm::InlineAsmArch::RiscV32
                    | asm::InlineAsmArch::RiscV64
                    | asm::InlineAsmArch::LoongArch32
                    | asm::InlineAsmArch::LoongArch64
                    | asm::InlineAsmArch::S390x
            );
            if !is_stable && !self.tcx.features().asm_experimental_arch() {
                feature_err(
                    &self.tcx.sess,
                    sym::asm_experimental_arch,
                    sp,
                    fluent::ast_lowering_unstable_inline_assembly,
                )
                .emit();
            }
        }
        let allow_experimental_reg = self.tcx.features().asm_experimental_reg();
        if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
            && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
            && !self.tcx.sess.opts.actually_rustdoc
        {
            self.dcx().emit_err(AttSyntaxOnlyX86 { span: sp });
        }
        if asm.options.contains(InlineAsmOptions::MAY_UNWIND) && !self.tcx.features().asm_unwind() {
            feature_err(
                &self.tcx.sess,
                sym::asm_unwind,
                sp,
                fluent::ast_lowering_unstable_may_unwind,
            )
            .emit();
        }

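        // Parse the `clobber_abi` list, deduplicating entries as we go.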
        let mut clobber_abis = FxIndexMap::default();
        if let Some(asm_arch) = asm_arch {
            for (abi_name, abi_span) in &asm.clobber_abis {
                match asm::InlineAsmClobberAbi::parse(
                    asm_arch,
                    &self.tcx.sess.target,
                    &self.tcx.sess.unstable_target_features,
                    *abi_name,
                ) {
                    Ok(abi) => {
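                        // If the ABI was already specified, emit an error.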
                        match clobber_abis.get(&abi) {
                            Some((prev_name, prev_sp)) => {
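                                // Multiple distinct ABI names can resolve to the
                                // same ABI; if the two spellings differ, tell the
                                // user that the names are equivalent.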
                                let source_map = self.tcx.sess.source_map();
                                let equivalent = source_map.span_to_snippet(*prev_sp)
                                    != source_map.span_to_snippet(*abi_span);

                                self.dcx().emit_err(AbiSpecifiedMultipleTimes {
                                    abi_span: *abi_span,
                                    prev_name: *prev_name,
                                    prev_span: *prev_sp,
                                    equivalent,
                                });
                            }
                            None => {
                                clobber_abis.insert(abi, (*abi_name, *abi_span));
                            }
                        }
                    }
                    Err(&[]) => {
                        self.dcx().emit_err(ClobberAbiNotSupported { abi_span: *abi_span });
                    }
                    Err(supported_abis) => {
                        let mut abis = format!("`{}`", supported_abis[0]);
                        for m in &supported_abis[1..] {
                            let _ = write!(abis, ", `{m}`");
                        }
                        self.dcx().emit_err(InvalidAbiClobberAbi {
                            abi_span: *abi_span,
                            supported_abis: abis,
                        });
                    }
                }
            }
        }

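        // Lower the asm operands to HIR. If an operand's register (class)
        // fails to parse, fall back to the `Err` placeholder so we can still
        // produce valid HIR.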
        let sess = self.tcx.sess;
        let mut operands: Vec<_> = asm
            .operands
            .iter()
            .map(|(op, op_sp)| {
                let lower_reg = |&reg: &_| match reg {
                    InlineAsmRegOrRegClass::Reg(reg) => {
                        asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmReg::parse(asm_arch, reg).unwrap_or_else(|error| {
                                self.dcx().emit_err(InvalidRegister {
                                    op_span: *op_sp,
                                    reg,
                                    error,
                                });
                                asm::InlineAsmReg::Err
                            })
                        } else {
                            asm::InlineAsmReg::Err
                        })
                    }
                    InlineAsmRegOrRegClass::RegClass(reg_class) => {
                        asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
                            asm::InlineAsmRegClass::parse(asm_arch, reg_class).unwrap_or_else(
                                |supported_register_classes| {
                                    let mut register_classes =
                                        format!("`{}`", supported_register_classes[0]);
                                    for m in &supported_register_classes[1..] {
                                        let _ = write!(register_classes, ", `{m}`");
                                    }
                                    self.dcx().emit_err(InvalidRegisterClass {
                                        op_span: *op_sp,
                                        reg_class,
                                        supported_register_classes: register_classes,
                                    });
                                    asm::InlineAsmRegClass::Err
                                },
                            )
                        } else {
                            asm::InlineAsmRegClass::Err
                        })
                    }
                };

                let op = match op {
                    InlineAsmOperand::In { reg, expr } => hir::InlineAsmOperand::In {
                        reg: lower_reg(reg),
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::Out { reg, late, expr } => hir::InlineAsmOperand::Out {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: expr.as_ref().map(|expr| self.lower_expr(expr)),
                    },
                    InlineAsmOperand::InOut { reg, late, expr } => hir::InlineAsmOperand::InOut {
                        reg: lower_reg(reg),
                        late: *late,
                        expr: self.lower_expr(expr),
                    },
                    InlineAsmOperand::SplitInOut { reg, late, in_expr, out_expr } => {
                        hir::InlineAsmOperand::SplitInOut {
                            reg: lower_reg(reg),
                            late: *late,
                            in_expr: self.lower_expr(in_expr),
                            out_expr: out_expr.as_ref().map(|expr| self.lower_expr(expr)),
                        }
                    }
                    InlineAsmOperand::Const { anon_const } => hir::InlineAsmOperand::Const {
                        anon_const: self.lower_const_block(anon_const),
                    },
                    InlineAsmOperand::Sym { sym } => {
                        let static_def_id = self
                            .resolver
                            .get_partial_res(sym.id)
                            .and_then(|res| res.full_res())
                            .and_then(|res| match res {
                                Res::Def(DefKind::Static { .. }, def_id) => Some(def_id),
                                _ => None,
                            });

                        if let Some(def_id) = static_def_id {
                            let path = self.lower_qpath(
                                sym.id,
                                &sym.qself,
                                &sym.path,
                                ParamMode::Optional,
                                AllowReturnTypeNotation::No,
                                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                                None,
                            );
                            hir::InlineAsmOperand::SymStatic { path, def_id }
                        } else {
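                            // The `sym` operand doesn't resolve to a static, so
                            // treat it as a function: rebuild it as a path
                            // expression, reusing the sym's node id.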
                            let expr = Expr {
                                id: sym.id,
                                kind: ExprKind::Path(sym.qself.clone(), sym.path.clone()),
                                span: *op_sp,
                                attrs: AttrVec::new(),
                                tokens: None,
                            };

                            hir::InlineAsmOperand::SymFn { expr: self.lower_expr(&expr) }
                        }
                    }
                    InlineAsmOperand::Label { block } => {
                        hir::InlineAsmOperand::Label { block: self.lower_block(block, false) }
                    }
                };
                (op, self.lower_span(*op_sp))
            })
            .collect();

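        // Validate template modifiers against the register classes of the operands.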
        for p in &asm.template {
            if let InlineAsmTemplatePiece::Placeholder {
                operand_idx,
                modifier: Some(modifier),
                span: placeholder_span,
            } = *p
            {
                let op_sp = asm.operands[operand_idx].1;
                match &operands[operand_idx].0 {
                    hir::InlineAsmOperand::In { reg, .. }
                    | hir::InlineAsmOperand::Out { reg, .. }
                    | hir::InlineAsmOperand::InOut { reg, .. }
                    | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
                        let class = reg.reg_class();
                        if class == asm::InlineAsmRegClass::Err {
                            continue;
                        }
                        let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
                        if !valid_modifiers.contains(&modifier) {
                            let sub = if !valid_modifiers.is_empty() {
                                let mut mods = format!("`{}`", valid_modifiers[0]);
                                for m in &valid_modifiers[1..] {
                                    let _ = write!(mods, ", `{m}`");
                                }
                                InvalidAsmTemplateModifierRegClassSub::SupportModifier {
                                    class_name: class.name(),
                                    modifiers: mods,
                                }
                            } else {
                                InvalidAsmTemplateModifierRegClassSub::DoesNotSupportModifier {
                                    class_name: class.name(),
                                }
                            };
                            self.dcx().emit_err(InvalidAsmTemplateModifierRegClass {
                                placeholder_span,
                                op_span: op_sp,
                                sub,
                            });
                        }
                    }
                    hir::InlineAsmOperand::Const { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierConst {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::SymFn { .. }
                    | hir::InlineAsmOperand::SymStatic { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierSym {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                    hir::InlineAsmOperand::Label { .. } => {
                        self.dcx().emit_err(InvalidAsmTemplateModifierLabel {
                            placeholder_span,
                            op_span: op_sp,
                        });
                    }
                }
            }
        }

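        // Track which explicit registers are used for input and for output so
        // we can detect conflicts below.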
        let mut used_input_regs = FxHashMap::default();
        let mut used_output_regs = FxHashMap::default();

        for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
            if let Some(reg) = op.reg() {
                let reg_class = reg.reg_class();
                if reg_class == asm::InlineAsmRegClass::Err {
                    continue;
                }

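                // Some register classes can only be used as clobbers: we
                // disallow passing a value in or out of them.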
                if reg_class.is_clobber_only(asm_arch.unwrap(), allow_experimental_reg)
                    && !op.is_clobber()
                {
                    if allow_experimental_reg || reg_class.is_clobber_only(asm_arch.unwrap(), true)
                    {
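                        // Clobber-only even with `asm_experimental_reg` enabled.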
                        self.dcx().emit_err(RegisterClassOnlyClobber {
                            op_span: op_sp,
                            reg_class_name: reg_class.name(),
                        });
                    } else {
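                        // Clobber-only on stable; suggest `asm_experimental_reg`.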
                        self.tcx
                            .sess
                            .create_feature_err(
                                RegisterClassOnlyClobberStable {
                                    op_span: op_sp,
                                    reg_class_name: reg_class.name(),
                                },
                                sym::asm_experimental_reg,
                            )
                            .emit();
                    }
                    continue;
                }

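                // Check for conflicts between explicit register operands.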
                if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
                    let (input, output) = match op {
                        hir::InlineAsmOperand::In { .. } => (true, false),

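                        // Late outputs do not conflict with inputs, but normal
                        // outputs do.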
                        hir::InlineAsmOperand::Out { late, .. } => (!late, true),

                        hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),

                        hir::InlineAsmOperand::Const { .. }
                        | hir::InlineAsmOperand::SymFn { .. }
                        | hir::InlineAsmOperand::SymStatic { .. }
                        | hir::InlineAsmOperand::Label { .. } => {
                            unreachable!("{op:?} is not a register operand");
                        }
                    };

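                    // Flag to output at most one conflict error per operand.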
                    let mut skip = false;

                    let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
                                     input,
                                     r: asm::InlineAsmReg| {
                        match used_regs.entry(r) {
                            Entry::Occupied(o) => {
                                if skip {
                                    return;
                                }
                                skip = true;

                                let idx2 = *o.get();
                                let (ref op2, op_sp2) = operands[idx2];

                                let in_out = match (op, op2) {
                                    (
                                        hir::InlineAsmOperand::In { .. },
                                        hir::InlineAsmOperand::Out { late, .. },
                                    )
                                    | (
                                        hir::InlineAsmOperand::Out { late, .. },
                                        hir::InlineAsmOperand::In { .. },
                                    ) => {
                                        assert!(!*late);
                                        let out_op_sp = if input { op_sp2 } else { op_sp };
                                        Some(out_op_sp)
                                    }
                                    _ => None,
                                };
                                let reg_str = |idx| -> &str {
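                                    // HIR operands don't preserve the register's
                                    // original spelling, so fetch it back from
                                    // the AST operand.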
                                    let (op, _): &(InlineAsmOperand, Span) = &asm.operands[idx];
                                    if let Some(ast::InlineAsmRegOrRegClass::Reg(reg_sym)) =
                                        op.reg()
                                    {
                                        reg_sym.as_str()
                                    } else {
                                        unreachable!("{op:?} is not a register operand");
                                    }
                                };

                                self.dcx().emit_err(RegisterConflict {
                                    op_span1: op_sp,
                                    op_span2: op_sp2,
                                    reg1_name: reg_str(idx),
                                    reg2_name: reg_str(idx2),
                                    in_out,
                                });
                            }
                            Entry::Vacant(v) => {
                                if r == reg {
                                    v.insert(idx);
                                }
                            }
                        }
                    };
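                    // A register conflicts with any register it overlaps,
                    // not just with itself (e.g. `al` vs `eax` on x86).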
                    let mut overlapping_with = vec![];
                    reg.overlapping_regs(|r| {
                        overlapping_with.push(r);
                    });
                    for r in overlapping_with {
                        if input {
                            check(&mut used_input_regs, true, r);
                        }
                        if output {
                            check(&mut used_output_regs, false, r);
                        }
                    }
                }
            }
        }

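        // For each `clobber_abi`, add an implicit late output clobber for
        // every register the ABI clobbers.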
        let mut clobbered = FxHashSet::default();
        for (abi, (_, abi_span)) in clobber_abis {
            for &clobber in abi.clobbered_regs() {
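                // Don't emit a clobber for a register that's already clobbered.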
                if clobbered.contains(&clobber) {
                    continue;
                }

                let mut overlapping_with = vec![];
                clobber.overlapping_regs(|reg| {
                    overlapping_with.push(reg);
                });
                let output_used =
                    overlapping_with.iter().any(|reg| used_output_regs.contains_key(&reg));

                if !output_used {
                    operands.push((
                        hir::InlineAsmOperand::Out {
                            reg: asm::InlineAsmRegOrRegClass::Reg(clobber),
                            late: true,
                            expr: None,
                        },
                        self.lower_span(abi_span),
                    ));
                    clobbered.insert(clobber);
                }
            }
        }

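        // `asm goto` with output operands is gated behind `asm_goto_with_outputs`.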
        if let Some((_, op_sp)) =
            operands.iter().find(|(op, _)| matches!(op, hir::InlineAsmOperand::Label { .. }))
        {
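            // Check whether an output operand is actually used.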
            let output_operand_used = operands.iter().any(|(op, _)| {
                matches!(
                    op,
                    hir::InlineAsmOperand::Out { expr: Some(_), .. }
                        | hir::InlineAsmOperand::InOut { .. }
                        | hir::InlineAsmOperand::SplitInOut { out_expr: Some(_), .. }
                )
            });
            if output_operand_used && !self.tcx.features().asm_goto_with_outputs() {
                feature_err(
                    sess,
                    sym::asm_goto_with_outputs,
                    *op_sp,
                    fluent::ast_lowering_unstable_inline_assembly_label_operand_with_outputs,
                )
                .emit();
            }
        }

        let operands = self.arena.alloc_from_iter(operands);
        let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
        let template_strs = self.arena.alloc_from_iter(
            asm.template_strs
                .iter()
                .map(|(sym, snippet, span)| (*sym, *snippet, self.lower_span(*span))),
        );
        let line_spans =
            self.arena.alloc_from_iter(asm.line_spans.iter().map(|span| self.lower_span(*span)));
        let hir_asm = hir::InlineAsm {
            asm_macro: asm.asm_macro,
            template,
            template_strs,
            operands,
            options: asm.options,
            line_spans,
        };
        self.arena.alloc(hir_asm)
    }
}