rustc_ast_lowering/
delegation.rs

1//! This module implements expansion of delegation items with early resolved paths.
//! It includes delegation to free functions:
3//!
4//! ```ignore (illustrative)
5//! reuse module::name { target_expr_template }
6//! ```
7//!
//! And delegation to trait methods:
9//!
10//! ```ignore (illustrative)
11//! reuse <Type as Trait>::name { target_expr_template }
12//! ```
13//!
14//! After expansion for both cases we get:
15//!
16//! ```ignore (illustrative)
17//! fn name(
18//!     arg0: InferDelegation(sig_id, Input(0)),
19//!     arg1: InferDelegation(sig_id, Input(1)),
20//!     ...,
21//!     argN: InferDelegation(sig_id, Input(N)),
22//! ) -> InferDelegation(sig_id, Output) {
23//!     callee_path(target_expr_template(arg0), arg1, ..., argN)
24//! }
25//! ```
26//!
27//! Where `callee_path` is a path in delegation item e.g. `<Type as Trait>::name`.
28//! `sig_id` is a id of item from which the signature is inherited. It may be a delegation
29//! item id (`item_id`) in case of impl trait or path resolution id (`path_id`) otherwise.
30//!
31//! Since we do not have a proper way to obtain function type information by path resolution
32//! in AST, we mark each function parameter type as `InferDelegation` and inherit it during
33//! HIR ty lowering.
34//!
35//! Similarly generics, predicates and header are set to the "default" values.
36//! In case of discrepancy with callee function the `UnsupportedDelegation` error will
37//! also be emitted during HIR ty lowering.
38
39use std::iter;
40
41use ast::visit::Visitor;
42use hir::def::{DefKind, PartialRes, Res};
43use hir::{BodyId, HirId};
44use rustc_abi::ExternAbi;
45use rustc_ast::*;
46use rustc_errors::ErrorGuaranteed;
47use rustc_hir::def_id::DefId;
48use rustc_middle::span_bug;
49use rustc_middle::ty::{Asyncness, ResolverAstLowering};
50use rustc_span::symbol::kw;
51use rustc_span::{Ident, Span, Symbol};
52use {rustc_ast as ast, rustc_hir as hir};
53
54use super::{GenericArgsMode, ImplTraitContext, LoweringContext, ParamMode};
55use crate::{AllowReturnTypeNotation, ImplTraitPosition, ResolverAstLoweringExt};
56
/// The lowered pieces of a delegation item, returned by
/// [`LoweringContext::lower_delegation`] for the caller to assemble into a HIR item.
pub(crate) struct DelegationResults<'hir> {
    /// Body that performs the call to the delegation target.
    pub body_id: hir::BodyId,
    /// Signature whose input/output types are `InferDelegation` placeholders.
    pub sig: hir::FnSig<'hir>,
    pub ident: Ident,
    /// Empty generics; the real ones are inherited later during HIR ty lowering.
    pub generics: &'hir hir::Generics<'hir>,
}
63
64impl<'hir> LoweringContext<'_, 'hir> {
65    fn is_method(&self, def_id: DefId, span: Span) -> bool {
66        match self.tcx.def_kind(def_id) {
67            DefKind::Fn => false,
68            DefKind::AssocFn => match def_id.as_local() {
69                Some(local_def_id) => self
70                    .resolver
71                    .delegation_fn_sigs
72                    .get(&local_def_id)
73                    .is_some_and(|sig| sig.has_self),
74                None => self.tcx.associated_item(def_id).is_method(),
75            },
76            _ => span_bug!(span, "unexpected DefKind for delegation item"),
77        }
78    }
79
80    pub(crate) fn lower_delegation(
81        &mut self,
82        delegation: &Delegation,
83        item_id: NodeId,
84        is_in_trait_impl: bool,
85    ) -> DelegationResults<'hir> {
86        let span = self.lower_span(delegation.path.segments.last().unwrap().ident.span);
87        let sig_id = self.get_delegation_sig_id(item_id, delegation.id, span, is_in_trait_impl);
88        match sig_id {
89            Ok(sig_id) => {
90                let is_method = self.is_method(sig_id, span);
91                let (param_count, c_variadic) = self.param_count(sig_id);
92                let decl = self.lower_delegation_decl(sig_id, param_count, c_variadic, span);
93                let sig = self.lower_delegation_sig(sig_id, decl, span);
94                let body_id = self.lower_delegation_body(delegation, is_method, param_count, span);
95                let ident = self.lower_ident(delegation.ident);
96                let generics = self.lower_delegation_generics(span);
97                DelegationResults { body_id, sig, ident, generics }
98            }
99            Err(err) => self.generate_delegation_error(err, span),
100        }
101    }
102
103    fn get_delegation_sig_id(
104        &self,
105        item_id: NodeId,
106        path_id: NodeId,
107        span: Span,
108        is_in_trait_impl: bool,
109    ) -> Result<DefId, ErrorGuaranteed> {
110        let sig_id = if is_in_trait_impl { item_id } else { path_id };
111        self.get_resolution_id(sig_id, span)
112    }
113
114    fn get_resolution_id(&self, node_id: NodeId, span: Span) -> Result<DefId, ErrorGuaranteed> {
115        let def_id =
116            self.resolver.get_partial_res(node_id).and_then(|r| r.expect_full_res().opt_def_id());
117        def_id.ok_or_else(|| {
118            self.tcx.dcx().span_delayed_bug(
119                span,
120                format!("LoweringContext: couldn't resolve node {:?} in delegation item", node_id),
121            )
122        })
123    }
124
125    fn lower_delegation_generics(&mut self, span: Span) -> &'hir hir::Generics<'hir> {
126        self.arena.alloc(hir::Generics {
127            params: &[],
128            predicates: &[],
129            has_where_clause_predicates: false,
130            where_clause_span: span,
131            span,
132        })
133    }
134
135    // Function parameter count, including C variadic `...` if present.
136    fn param_count(&self, sig_id: DefId) -> (usize, bool /*c_variadic*/) {
137        if let Some(local_sig_id) = sig_id.as_local() {
138            // Map may be filled incorrectly due to recursive delegation.
139            // Error will be emitted later during HIR ty lowering.
140            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
141                Some(sig) => (sig.param_count, sig.c_variadic),
142                None => (0, false),
143            }
144        } else {
145            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
146            (sig.inputs().len() + usize::from(sig.c_variadic), sig.c_variadic)
147        }
148    }
149
150    fn lower_delegation_decl(
151        &mut self,
152        sig_id: DefId,
153        param_count: usize,
154        c_variadic: bool,
155        span: Span,
156    ) -> &'hir hir::FnDecl<'hir> {
157        // The last parameter in C variadic functions is skipped in the signature,
158        // like during regular lowering.
159        let decl_param_count = param_count - c_variadic as usize;
160        let inputs = self.arena.alloc_from_iter((0..decl_param_count).map(|arg| hir::Ty {
161            hir_id: self.next_id(),
162            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Input(arg)),
163            span,
164        }));
165
166        let output = self.arena.alloc(hir::Ty {
167            hir_id: self.next_id(),
168            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Output),
169            span,
170        });
171
172        self.arena.alloc(hir::FnDecl {
173            inputs,
174            output: hir::FnRetTy::Return(output),
175            c_variadic,
176            lifetime_elision_allowed: true,
177            implicit_self: hir::ImplicitSelfKind::None,
178        })
179    }
180
181    fn lower_delegation_sig(
182        &mut self,
183        sig_id: DefId,
184        decl: &'hir hir::FnDecl<'hir>,
185        span: Span,
186    ) -> hir::FnSig<'hir> {
187        let header = if let Some(local_sig_id) = sig_id.as_local() {
188            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
189                Some(sig) => {
190                    let parent = self.tcx.parent(sig_id);
191                    // HACK: we override the default safety instead of generating attributes from the ether.
192                    // We are not forwarding the attributes, as the delegation fn sigs are collected on the ast,
193                    // and here we need the hir attributes.
194                    let default_safety =
195                        if sig.target_feature || self.tcx.def_kind(parent) == DefKind::ForeignMod {
196                            hir::Safety::Unsafe
197                        } else {
198                            hir::Safety::Safe
199                        };
200                    self.lower_fn_header(sig.header, default_safety, &[])
201                }
202                None => self.generate_header_error(),
203            }
204        } else {
205            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
206            let asyncness = match self.tcx.asyncness(sig_id) {
207                Asyncness::Yes => hir::IsAsync::Async(span),
208                Asyncness::No => hir::IsAsync::NotAsync,
209            };
210            hir::FnHeader {
211                safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features {
212                    hir::HeaderSafety::SafeTargetFeatures
213                } else {
214                    hir::HeaderSafety::Normal(sig.safety)
215                },
216                constness: self.tcx.constness(sig_id),
217                asyncness,
218                abi: sig.abi,
219            }
220        };
221        hir::FnSig { decl, header, span }
222    }
223
224    fn generate_param(
225        &mut self,
226        is_method: bool,
227        idx: usize,
228        span: Span,
229    ) -> (hir::Param<'hir>, NodeId) {
230        let pat_node_id = self.next_node_id();
231        let pat_id = self.lower_node_id(pat_node_id);
232        // FIXME(cjgillot) AssocItem currently relies on self parameter being exactly named `self`.
233        let name = if is_method && idx == 0 {
234            kw::SelfLower
235        } else {
236            Symbol::intern(&format!("arg{idx}"))
237        };
238        let ident = Ident::with_dummy_span(name);
239        let pat = self.arena.alloc(hir::Pat {
240            hir_id: pat_id,
241            kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, ident, None),
242            span,
243            default_binding_modes: false,
244        });
245
246        (hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id)
247    }
248
249    fn generate_arg(
250        &mut self,
251        is_method: bool,
252        idx: usize,
253        param_id: HirId,
254        span: Span,
255    ) -> hir::Expr<'hir> {
256        // FIXME(cjgillot) AssocItem currently relies on self parameter being exactly named `self`.
257        let name = if is_method && idx == 0 {
258            kw::SelfLower
259        } else {
260            Symbol::intern(&format!("arg{idx}"))
261        };
262        let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment {
263            ident: Ident::with_dummy_span(name),
264            hir_id: self.next_id(),
265            res: Res::Local(param_id),
266            args: None,
267            infer_args: false,
268        }));
269
270        let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments });
271        self.mk_expr(hir::ExprKind::Path(hir::QPath::Resolved(None, path)), span)
272    }
273
274    fn lower_delegation_body(
275        &mut self,
276        delegation: &Delegation,
277        is_method: bool,
278        param_count: usize,
279        span: Span,
280    ) -> BodyId {
281        let block = delegation.body.as_deref();
282
283        self.lower_body(|this| {
284            let mut parameters: Vec<hir::Param<'_>> = Vec::with_capacity(param_count);
285            let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);
286
287            for idx in 0..param_count {
288                let (param, pat_node_id) = this.generate_param(is_method, idx, span);
289                parameters.push(param);
290
291                let arg = if let Some(block) = block
292                    && idx == 0
293                {
294                    let mut self_resolver = SelfResolver {
295                        resolver: this.resolver,
296                        path_id: delegation.id,
297                        self_param_id: pat_node_id,
298                    };
299                    self_resolver.visit_block(block);
300                    // Target expr needs to lower `self` path.
301                    this.ident_and_label_to_local_id.insert(pat_node_id, param.pat.hir_id.local_id);
302                    this.lower_target_expr(&block)
303                } else {
304                    this.generate_arg(is_method, idx, param.pat.hir_id, span)
305                };
306                args.push(arg);
307            }
308
309            let final_expr = this.finalize_body_lowering(delegation, args, span);
310            (this.arena.alloc_from_iter(parameters), final_expr)
311        })
312    }
313
314    // FIXME(fn_delegation): Alternatives for target expression lowering:
315    // https://github.com/rust-lang/rfcs/pull/3530#issuecomment-2197170600.
316    fn lower_target_expr(&mut self, block: &Block) -> hir::Expr<'hir> {
317        if let [stmt] = block.stmts.as_slice()
318            && let StmtKind::Expr(expr) = &stmt.kind
319        {
320            return self.lower_expr_mut(expr);
321        }
322
323        let block = self.lower_block(block, false);
324        self.mk_expr(hir::ExprKind::Block(block, None), block.span)
325    }
326
327    // Generates expression for the resulting body. If possible, `MethodCall` is used
328    // to allow autoref/autoderef for target expression. For example in:
329    //
330    // trait Trait : Sized {
331    //     fn by_value(self) -> i32 { 1 }
332    //     fn by_mut_ref(&mut self) -> i32 { 2 }
333    //     fn by_ref(&self) -> i32 { 3 }
334    // }
335    //
336    // struct NewType(SomeType);
337    // impl Trait for NewType {
338    //     reuse Trait::* { self.0 }
339    // }
340    //
341    // `self.0` will automatically coerce.
342    fn finalize_body_lowering(
343        &mut self,
344        delegation: &Delegation,
345        args: Vec<hir::Expr<'hir>>,
346        span: Span,
347    ) -> hir::Expr<'hir> {
348        let args = self.arena.alloc_from_iter(args);
349
350        let has_generic_args =
351            delegation.path.segments.iter().rev().skip(1).any(|segment| segment.args.is_some());
352
353        let call = if self
354            .get_resolution_id(delegation.id, span)
355            .and_then(|def_id| Ok(self.is_method(def_id, span)))
356            .unwrap_or_default()
357            && delegation.qself.is_none()
358            && !has_generic_args
359            && !args.is_empty()
360        {
361            let ast_segment = delegation.path.segments.last().unwrap();
362            let segment = self.lower_path_segment(
363                delegation.path.span,
364                ast_segment,
365                ParamMode::Optional,
366                GenericArgsMode::Err,
367                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
368                None,
369            );
370            let segment = self.arena.alloc(segment);
371
372            self.arena.alloc(hir::Expr {
373                hir_id: self.next_id(),
374                kind: hir::ExprKind::MethodCall(segment, &args[0], &args[1..], span),
375                span,
376            })
377        } else {
378            let path = self.lower_qpath(
379                delegation.id,
380                &delegation.qself,
381                &delegation.path,
382                ParamMode::Optional,
383                AllowReturnTypeNotation::No,
384                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
385                None,
386            );
387
388            let callee_path = self.arena.alloc(self.mk_expr(hir::ExprKind::Path(path), span));
389            self.arena.alloc(self.mk_expr(hir::ExprKind::Call(callee_path, args), span))
390        };
391        let block = self.arena.alloc(hir::Block {
392            stmts: &[],
393            expr: Some(call),
394            hir_id: self.next_id(),
395            rules: hir::BlockCheckMode::DefaultBlock,
396            span,
397            targeted_by_break: false,
398        });
399
400        self.mk_expr(hir::ExprKind::Block(block, None), span)
401    }
402
403    fn generate_delegation_error(
404        &mut self,
405        err: ErrorGuaranteed,
406        span: Span,
407    ) -> DelegationResults<'hir> {
408        let generics = self.lower_delegation_generics(span);
409
410        let decl = self.arena.alloc(hir::FnDecl {
411            inputs: &[],
412            output: hir::FnRetTy::DefaultReturn(span),
413            c_variadic: false,
414            lifetime_elision_allowed: true,
415            implicit_self: hir::ImplicitSelfKind::None,
416        });
417
418        let header = self.generate_header_error();
419        let sig = hir::FnSig { decl, header, span };
420
421        let ident = Ident::dummy();
422        let body_id = self.lower_body(|this| (&[], this.mk_expr(hir::ExprKind::Err(err), span)));
423        DelegationResults { ident, generics, body_id, sig }
424    }
425
426    fn generate_header_error(&self) -> hir::FnHeader {
427        hir::FnHeader {
428            safety: hir::Safety::Safe.into(),
429            constness: hir::Constness::NotConst,
430            asyncness: hir::IsAsync::NotAsync,
431            abi: ExternAbi::Rust,
432        }
433    }
434
435    #[inline]
436    fn mk_expr(&mut self, kind: hir::ExprKind<'hir>, span: Span) -> hir::Expr<'hir> {
437        hir::Expr { hir_id: self.next_id(), kind, span }
438    }
439}
440
/// AST visitor that rebinds `self` inside a delegation target expression:
/// any resolution pointing at `path_id` is redirected to the generated
/// `self` parameter (`self_param_id`).
struct SelfResolver<'a> {
    resolver: &'a mut ResolverAstLowering,
    // Node id of the delegation item's path; `self` in the target expr resolves here.
    path_id: NodeId,
    // Node id of the generated `self` parameter binding.
    self_param_id: NodeId,
}
446
447impl<'a> SelfResolver<'a> {
448    fn try_replace_id(&mut self, id: NodeId) {
449        if let Some(res) = self.resolver.partial_res_map.get(&id)
450            && let Some(Res::Local(sig_id)) = res.full_res()
451            && sig_id == self.path_id
452        {
453            let new_res = PartialRes::new(Res::Local(self.self_param_id));
454            self.resolver.partial_res_map.insert(id, new_res);
455        }
456    }
457}
458
impl<'ast, 'a> Visitor<'ast> for SelfResolver<'a> {
    // Visit every node id in the target expression and attempt the rebinding.
    fn visit_id(&mut self, id: NodeId) {
        self.try_replace_id(id);
    }
}