rustc_ast_lowering/delegation.rs

//! This module implements expansion of delegation items with early resolved paths.
//! It includes delegation to free functions:
//!
//! ```ignore (illustrative)
//! reuse module::name { target_expr_template }
//! ```
//!
//! And delegation to trait methods:
//!
//! ```ignore (illustrative)
//! reuse <Type as Trait>::name { target_expr_template }
//! ```
//!
//! After expansion, both cases result in:
//!
//! ```ignore (illustrative)
//! fn name(
//!     arg0: InferDelegation(sig_id, Input(0)),
//!     arg1: InferDelegation(sig_id, Input(1)),
//!     ...,
//!     argN: InferDelegation(sig_id, Input(N)),
//! ) -> InferDelegation(sig_id, Output) {
//!     callee_path(target_expr_template(arg0), arg1, ..., argN)
//! }
//! ```
//!
//! Where `callee_path` is the path in the delegation item, e.g. `<Type as Trait>::name`,
//! and `sig_id` is the id of the item from which the signature is inherited. It may be the
//! delegation item id (`item_id`) in case of a trait impl, or the path resolution id
//! (`path_id`) otherwise.
//!
//! Since we do not have a proper way to obtain function type information by path resolution
//! in the AST, we mark each function parameter type as `InferDelegation` and inherit it during
//! HIR ty lowering.
//!
//! Similarly, generics, predicates and the header are set to "default" values.
//! In case of a discrepancy with the callee function, an `UnsupportedDelegation` error will
//! also be emitted during HIR ty lowering.
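//!
//! As a rough illustration (not the exact HIR), a delegation to a free function without a
//! target expression, e.g.:
//!
//! ```ignore (illustrative)
//! fn callee(x: u8, y: u8) -> u8 { x + y }
//! reuse callee;
//! ```
//!
//! conceptually expands to:
//!
//! ```ignore (illustrative)
//! fn callee(
//!     arg0: InferDelegation(sig_id, Input(0)),
//!     arg1: InferDelegation(sig_id, Input(1)),
//! ) -> InferDelegation(sig_id, Output) {
//!     callee(arg0, arg1)
//! }
//! ```
//!
//! with the placeholder types later resolved to `u8` against the callee's signature.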

use std::iter;

use ast::visit::Visitor;
use hir::def::{DefKind, PartialRes, Res};
use hir::{BodyId, HirId};
use rustc_abi::ExternAbi;
use rustc_ast::*;
use rustc_errors::ErrorGuaranteed;
use rustc_hir::def_id::DefId;
use rustc_middle::span_bug;
use rustc_middle::ty::{Asyncness, ResolverAstLowering};
use rustc_span::{Ident, Span, Symbol};
use {rustc_ast as ast, rustc_hir as hir};

use super::{GenericArgsMode, ImplTraitContext, LoweringContext, ParamMode};
use crate::{AllowReturnTypeNotation, ImplTraitPosition, ResolverAstLoweringExt};

pub(crate) struct DelegationResults<'hir> {
    pub body_id: hir::BodyId,
    pub sig: hir::FnSig<'hir>,
    pub ident: Ident,
    pub generics: &'hir hir::Generics<'hir>,
}

impl<'hir> LoweringContext<'_, 'hir> {
    /// Determines whether the delegatee is an associated function whose first parameter is `self`.
    pub(crate) fn delegatee_is_method(
        &self,
        item_id: NodeId,
        path_id: NodeId,
        span: Span,
        is_in_trait_impl: bool,
    ) -> bool {
        let sig_id = self.get_delegation_sig_id(item_id, path_id, span, is_in_trait_impl);
        let Ok(sig_id) = sig_id else {
            return false;
        };
        self.is_method(sig_id, span)
    }

    fn is_method(&self, def_id: DefId, span: Span) -> bool {
        match self.tcx.def_kind(def_id) {
            DefKind::Fn => false,
            DefKind::AssocFn => match def_id.as_local() {
                Some(local_def_id) => self
                    .resolver
                    .delegation_fn_sigs
                    .get(&local_def_id)
                    .is_some_and(|sig| sig.has_self),
                None => self.tcx.associated_item(def_id).is_method(),
            },
            _ => span_bug!(span, "unexpected DefKind for delegation item"),
        }
    }

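    /// Lowers a delegation item into its HIR signature, body, identifier and generics,
    /// falling back to an error stub if the delegation path could not be resolved.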
    pub(crate) fn lower_delegation(
        &mut self,
        delegation: &Delegation,
        item_id: NodeId,
        is_in_trait_impl: bool,
    ) -> DelegationResults<'hir> {
        let span = self.lower_span(delegation.path.segments.last().unwrap().ident.span);
        let sig_id = self.get_delegation_sig_id(item_id, delegation.id, span, is_in_trait_impl);
        match sig_id {
            Ok(sig_id) => {
                let (param_count, c_variadic) = self.param_count(sig_id);
                let decl = self.lower_delegation_decl(sig_id, param_count, c_variadic, span);
                let sig = self.lower_delegation_sig(sig_id, decl, span);
                let body_id = self.lower_delegation_body(delegation, param_count, span);
                let ident = self.lower_ident(delegation.ident);
                let generics = self.lower_delegation_generics(span);
                DelegationResults { body_id, sig, ident, generics }
            }
            Err(err) => self.generate_delegation_error(err, span),
        }
    }

    fn get_delegation_sig_id(
        &self,
        item_id: NodeId,
        path_id: NodeId,
        span: Span,
        is_in_trait_impl: bool,
    ) -> Result<DefId, ErrorGuaranteed> {
        let sig_id = if is_in_trait_impl { item_id } else { path_id };
        self.get_resolution_id(sig_id, span)
    }

    fn get_resolution_id(&self, node_id: NodeId, span: Span) -> Result<DefId, ErrorGuaranteed> {
        let def_id =
            self.resolver.get_partial_res(node_id).and_then(|r| r.expect_full_res().opt_def_id());
        def_id.ok_or_else(|| {
            self.tcx.dcx().span_delayed_bug(
                span,
                format!("LoweringContext: couldn't resolve node {:?} in delegation item", node_id),
            )
        })
    }
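
    /// Delegation items get empty generics and predicates; any mismatch with the callee
    /// is reported later during HIR ty lowering.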
    fn lower_delegation_generics(&mut self, span: Span) -> &'hir hir::Generics<'hir> {
        self.arena.alloc(hir::Generics {
            params: &[],
            predicates: &[],
            has_where_clause_predicates: false,
            where_clause_span: span,
            span,
        })
    }

    // Function parameter count, including the C variadic `...` if present.
    fn param_count(&self, sig_id: DefId) -> (usize, bool /*c_variadic*/) {
        if let Some(local_sig_id) = sig_id.as_local() {
            // The map may be filled incorrectly due to recursive delegation.
            // The error will be emitted later during HIR ty lowering.
            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
                Some(sig) => (sig.param_count, sig.c_variadic),
                None => (0, false),
            }
        } else {
            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
            (sig.inputs().len() + usize::from(sig.c_variadic), sig.c_variadic)
        }
    }

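    /// Builds the function declaration for the delegation item. Every parameter type and the
    /// return type are `InferDelegation` placeholders that are resolved against the callee's
    /// signature during HIR ty lowering.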
    fn lower_delegation_decl(
        &mut self,
        sig_id: DefId,
        param_count: usize,
        c_variadic: bool,
        span: Span,
    ) -> &'hir hir::FnDecl<'hir> {
        // The last parameter in C variadic functions is skipped in the signature,
        // like during regular lowering.
        let decl_param_count = param_count - c_variadic as usize;
        let inputs = self.arena.alloc_from_iter((0..decl_param_count).map(|arg| hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Input(arg)),
            span,
        }));

        let output = self.arena.alloc(hir::Ty {
            hir_id: self.next_id(),
            kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Output),
            span,
        });

        self.arena.alloc(hir::FnDecl {
            inputs,
            output: hir::FnRetTy::Return(output),
            c_variadic,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        })
    }

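    /// Builds the function header for the delegation item: for local callees it is derived
    /// from the signatures collected by the resolver, otherwise it is obtained through
    /// queries on the foreign callee.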
    fn lower_delegation_sig(
        &mut self,
        sig_id: DefId,
        decl: &'hir hir::FnDecl<'hir>,
        span: Span,
    ) -> hir::FnSig<'hir> {
        let header = if let Some(local_sig_id) = sig_id.as_local() {
            match self.resolver.delegation_fn_sigs.get(&local_sig_id) {
                Some(sig) => {
                    let parent = self.tcx.parent(sig_id);
                    // HACK: we override the default safety instead of generating attributes from the ether.
                    // We are not forwarding the attributes, as the delegation fn sigs are collected on the AST,
                    // and here we need the HIR attributes.
                    let default_safety =
                        if sig.target_feature || self.tcx.def_kind(parent) == DefKind::ForeignMod {
                            hir::Safety::Unsafe
                        } else {
                            hir::Safety::Safe
                        };
                    self.lower_fn_header(sig.header, default_safety, &[])
                }
                None => self.generate_header_error(),
            }
        } else {
            let sig = self.tcx.fn_sig(sig_id).skip_binder().skip_binder();
            let asyncness = match self.tcx.asyncness(sig_id) {
                Asyncness::Yes => hir::IsAsync::Async(span),
                Asyncness::No => hir::IsAsync::NotAsync,
            };
            hir::FnHeader {
                safety: if self.tcx.codegen_fn_attrs(sig_id).safe_target_features {
                    hir::HeaderSafety::SafeTargetFeatures
                } else {
                    hir::HeaderSafety::Normal(sig.safety)
                },
                constness: self.tcx.constness(sig_id),
                asyncness,
                abi: sig.abi,
            }
        };
        hir::FnSig { decl, header, span }
    }

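    /// Generates the pattern for the synthesized `argN` parameter of the delegation body.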
    fn generate_param(&mut self, idx: usize, span: Span) -> (hir::Param<'hir>, NodeId) {
        let pat_node_id = self.next_node_id();
        let pat_id = self.lower_node_id(pat_node_id);
        let ident = Ident::with_dummy_span(Symbol::intern(&format!("arg{idx}")));
        let pat = self.arena.alloc(hir::Pat {
            hir_id: pat_id,
            kind: hir::PatKind::Binding(hir::BindingMode::NONE, pat_id, ident, None),
            span,
            default_binding_modes: false,
        });

        (hir::Param { hir_id: self.next_id(), pat, ty_span: span, span }, pat_node_id)
    }

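    /// Generates a path expression referring to the synthesized `argN` binding, used as an
    /// argument of the callee invocation.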
    fn generate_arg(&mut self, idx: usize, param_id: HirId, span: Span) -> hir::Expr<'hir> {
        let segments = self.arena.alloc_from_iter(iter::once(hir::PathSegment {
            ident: Ident::with_dummy_span(Symbol::intern(&format!("arg{idx}"))),
            hir_id: self.next_id(),
            res: Res::Local(param_id),
            args: None,
            infer_args: false,
        }));

        let path = self.arena.alloc(hir::Path { span, res: Res::Local(param_id), segments });
        self.mk_expr(hir::ExprKind::Path(hir::QPath::Resolved(None, path)), span)
    }

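    /// Lowers the delegation body: synthesizes the `arg0..argN` parameters and their
    /// corresponding argument expressions; if a target expression block is present, it
    /// replaces the first argument and has its `self` rebound to the first parameter.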
    fn lower_delegation_body(
        &mut self,
        delegation: &Delegation,
        param_count: usize,
        span: Span,
    ) -> BodyId {
        let block = delegation.body.as_deref();

        self.lower_body(|this| {
            let mut parameters: Vec<hir::Param<'_>> = Vec::with_capacity(param_count);
            let mut args: Vec<hir::Expr<'_>> = Vec::with_capacity(param_count);

            for idx in 0..param_count {
                let (param, pat_node_id) = this.generate_param(idx, span);
                parameters.push(param);

                let arg = if let Some(block) = block
                    && idx == 0
                {
                    let mut self_resolver = SelfResolver {
                        resolver: this.resolver,
                        path_id: delegation.id,
                        self_param_id: pat_node_id,
                    };
                    self_resolver.visit_block(block);
                    // The target expr needs to lower the `self` path.
                    this.ident_and_label_to_local_id.insert(pat_node_id, param.pat.hir_id.local_id);
                    this.lower_target_expr(&block)
                } else {
                    this.generate_arg(idx, param.pat.hir_id, span)
                };
                args.push(arg);
            }

            let final_expr = this.finalize_body_lowering(delegation, args, span);
            (this.arena.alloc_from_iter(parameters), final_expr)
        })
    }

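    /// Lowers the user-written target expression block. A block consisting of a single
    /// expression statement is lowered directly to that expression; otherwise the whole
    /// block is lowered.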
    // FIXME(fn_delegation): Alternatives for target expression lowering:
    // https://github.com/rust-lang/rfcs/pull/3530#issuecomment-2197170600.
    fn lower_target_expr(&mut self, block: &Block) -> hir::Expr<'hir> {
        if let [stmt] = block.stmts.as_slice()
            && let StmtKind::Expr(expr) = &stmt.kind
        {
            return self.lower_expr_mut(expr);
        }

        let block = self.lower_block(block, false);
        self.mk_expr(hir::ExprKind::Block(block, None), block.span)
    }

    // Generates the expression for the resulting body. If possible, a `MethodCall` is used
    // to allow autoref/autoderef for the target expression. For example in:
    //
    // trait Trait : Sized {
    //     fn by_value(self) -> i32 { 1 }
    //     fn by_mut_ref(&mut self) -> i32 { 2 }
    //     fn by_ref(&self) -> i32 { 3 }
    // }
    //
    // struct NewType(SomeType);
    // impl Trait for NewType {
    //     reuse Trait::* { self.0 }
    // }
    //
    // `self.0` will automatically coerce.
    fn finalize_body_lowering(
        &mut self,
        delegation: &Delegation,
        args: Vec<hir::Expr<'hir>>,
        span: Span,
    ) -> hir::Expr<'hir> {
        let args = self.arena.alloc_from_iter(args);

        let has_generic_args =
            delegation.path.segments.iter().rev().skip(1).any(|segment| segment.args.is_some());

        let call = if self
            .get_resolution_id(delegation.id, span)
            .map(|def_id| self.is_method(def_id, span))
            .unwrap_or_default()
            && delegation.qself.is_none()
            && !has_generic_args
            && !args.is_empty()
        {
            let ast_segment = delegation.path.segments.last().unwrap();
            let segment = self.lower_path_segment(
                delegation.path.span,
                ast_segment,
                ParamMode::Optional,
                GenericArgsMode::Err,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );
            let segment = self.arena.alloc(segment);

            self.arena.alloc(hir::Expr {
                hir_id: self.next_id(),
                kind: hir::ExprKind::MethodCall(segment, &args[0], &args[1..], span),
                span,
            })
        } else {
            let path = self.lower_qpath(
                delegation.id,
                &delegation.qself,
                &delegation.path,
                ParamMode::Optional,
                AllowReturnTypeNotation::No,
                ImplTraitContext::Disallowed(ImplTraitPosition::Path),
                None,
            );

            let callee_path = self.arena.alloc(self.mk_expr(hir::ExprKind::Path(path), span));
            self.arena.alloc(self.mk_expr(hir::ExprKind::Call(callee_path, args), span))
        };
        let block = self.arena.alloc(hir::Block {
            stmts: &[],
            expr: Some(call),
            hir_id: self.next_id(),
            rules: hir::BlockCheckMode::DefaultBlock,
            span,
            targeted_by_break: false,
        });

        self.mk_expr(hir::ExprKind::Block(block, None), span)
    }

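    /// Generates a stub signature and body for a delegation item whose path failed to
    /// resolve, so that lowering can continue after the error has been reported.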
    fn generate_delegation_error(
        &mut self,
        err: ErrorGuaranteed,
        span: Span,
    ) -> DelegationResults<'hir> {
        let generics = self.lower_delegation_generics(span);

        let decl = self.arena.alloc(hir::FnDecl {
            inputs: &[],
            output: hir::FnRetTy::DefaultReturn(span),
            c_variadic: false,
            lifetime_elision_allowed: true,
            implicit_self: hir::ImplicitSelfKind::None,
        });

        let header = self.generate_header_error();
        let sig = hir::FnSig { decl, header, span };

        let ident = Ident::dummy();
        let body_id = self.lower_body(|this| (&[], this.mk_expr(hir::ExprKind::Err(err), span)));
        DelegationResults { ident, generics, body_id, sig }
    }

    fn generate_header_error(&self) -> hir::FnHeader {
        hir::FnHeader {
            safety: hir::Safety::Safe.into(),
            constness: hir::Constness::NotConst,
            asyncness: hir::IsAsync::NotAsync,
            abi: ExternAbi::Rust,
        }
    }

    #[inline]
    fn mk_expr(&mut self, kind: hir::ExprKind<'hir>, span: Span) -> hir::Expr<'hir> {
        hir::Expr { hir_id: self.next_id(), kind, span }
    }
}

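/// Rebinds `self` inside the target expression block: any local resolution that points at the
/// delegation path id is redirected to the synthesized first parameter.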
struct SelfResolver<'a> {
    resolver: &'a mut ResolverAstLowering,
    path_id: NodeId,
    self_param_id: NodeId,
}

impl<'a> SelfResolver<'a> {
    fn try_replace_id(&mut self, id: NodeId) {
        if let Some(res) = self.resolver.partial_res_map.get(&id)
            && let Some(Res::Local(sig_id)) = res.full_res()
            && sig_id == self.path_id
        {
            let new_res = PartialRes::new(Res::Local(self.self_param_id));
            self.resolver.partial_res_map.insert(id, new_res);
        }
    }
}

impl<'ast, 'a> Visitor<'ast> for SelfResolver<'a> {
    fn visit_path(&mut self, path: &'ast Path, id: NodeId) {
        self.try_replace_id(id);
        visit::walk_path(self, path);
    }

    fn visit_path_segment(&mut self, seg: &'ast PathSegment) {
        self.try_replace_id(seg.id);
        visit::walk_path_segment(self, seg);
    }
}