@@ -7,6 +7,7 @@ use rustc_index::vec::Idx;
 use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
 use rustc_middle::mir::visit::*;
 use rustc_middle::mir::*;
+use rustc_middle::ty::subst::Subst;
 use rustc_middle::ty::{self, ConstKind, Instance, InstanceDef, ParamEnv, Ty, TyCtxt};
 use rustc_span::{hygiene::ExpnKind, ExpnData, Span};
 use rustc_target::spec::abi::Abi;
@@ -28,6 +29,7 @@ pub struct Inline;
 #[derive(Copy, Clone, Debug)]
 struct CallSite<'tcx> {
     callee: Instance<'tcx>,
+    fn_sig: ty::PolyFnSig<'tcx>,
     block: BasicBlock,
     target: Option<BasicBlock>,
     source_info: SourceInfo,
@@ -173,22 +175,23 @@ impl Inliner<'tcx> {
 
         // Only consider direct calls to functions
         let terminator = bb_data.terminator();
-        if let TerminatorKind::Call { func: ref op, ref destination, .. } = terminator.kind {
-            if let ty::FnDef(callee_def_id, substs) = *op.ty(caller_body, self.tcx).kind() {
-                // To resolve an instance its substs have to be fully normalized, so
-                // we do this here.
-                let normalized_substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
+        if let TerminatorKind::Call { ref func, ref destination, .. } = terminator.kind {
+            let func_ty = func.ty(caller_body, self.tcx);
+            if let ty::FnDef(def_id, substs) = *func_ty.kind() {
+                // To resolve an instance its substs have to be fully normalized.
+                let substs = self.tcx.normalize_erasing_regions(self.param_env, substs);
                 let callee =
-                    Instance::resolve(self.tcx, self.param_env, callee_def_id, normalized_substs)
-                        .ok()
-                        .flatten()?;
+                    Instance::resolve(self.tcx, self.param_env, def_id, substs).ok().flatten()?;
 
                 if let InstanceDef::Virtual(..) | InstanceDef::Intrinsic(_) = callee.def {
                     return None;
                 }
 
+                let fn_sig = self.tcx.fn_sig(def_id).subst(self.tcx, substs);
+
                 return Some(CallSite {
                     callee,
+                    fn_sig,
                     block: bb,
                     target: destination.map(|(_, target)| target),
                     source_info: terminator.source_info,
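For context (not part of the patch), the substituted signature stored in `CallSite::fn_sig` is what makes the later ABI and variadic checks meaningful for generic callees; a minimal illustration outside the compiler:

// Illustration only, not compiler code: for a generic callee
fn identity<T>(x: T) -> T { x }

fn main() {
    // A call `identity::<u32>(1)` has the instantiated signature `fn(u32) -> u32`;
    // `tcx.fn_sig(def_id).subst(tcx, substs)` computes exactly that instantiation
    // from the substs found at the call site.
    let _ = identity::<u32>(1);
}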
@@ -203,9 +206,8 @@ impl Inliner<'tcx> {
         debug!("should_inline({:?})", callsite);
         let tcx = self.tcx;
 
-        // Cannot inline generators which haven't been transformed yet
-        if callee_body.yield_ty.is_some() {
-            debug!("yield ty present - not inlining");
+        if callsite.fn_sig.c_variadic() {
+            debug!("callee is variadic - not inlining");
             return false;
         }
 
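For reference (not part of the patch), the callees rejected by the new `c_variadic()` check are those whose instantiated signature is C-variadic, e.g. a printf-style foreign declaration:

// Illustration only, not compiler code.
use std::os::raw::{c_char, c_int};

extern "C" {
    // `c_variadic()` is true for this signature, so a call to it is never inlined.
    fn printf(fmt: *const c_char, ...) -> c_int;
}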
@@ -218,11 +220,7 @@ impl Inliner<'tcx> {
             return false;
         }
 
-        let self_no_sanitize =
-            self.codegen_fn_attrs.no_sanitize & self.tcx.sess.opts.debugging_opts.sanitizer;
-        let callee_no_sanitize =
-            codegen_fn_attrs.no_sanitize & self.tcx.sess.opts.debugging_opts.sanitizer;
-        if self_no_sanitize != callee_no_sanitize {
+        if self.codegen_fn_attrs.no_sanitize != codegen_fn_attrs.no_sanitize {
             debug!("`callee has incompatible no_sanitize attribute - not inlining");
             return false;
         }
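The rewritten condition compares the raw `no_sanitize` sets instead of first masking them with the enabled sanitizers, so a mismatch is rejected even when no sanitizer is active. For reference (not part of the patch), on nightly such a mismatch looks like:

// Illustration only, not compiler code; `no_sanitize` is a nightly feature.
#![feature(no_sanitize)]

#[no_sanitize(address)]
fn callee() {}

fn caller() {
    // The caller's no_sanitize set is empty and the callee's is not: not inlined.
    callee();
}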
@@ -256,9 +254,9 @@ impl Inliner<'tcx> {
             self.tcx.sess.opts.debugging_opts.inline_mir_threshold
         };
 
-        // Significantly lower the threshold for inlining cold functions
         if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::COLD) {
-            threshold /= 5;
+            debug!("#[cold] present - not inlining");
+            return false;
         }
 
         // Give a bonus functions with a small number of blocks,
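For reference (not part of the patch), a `#[cold]` callee is now skipped outright instead of being considered under a fifth of the normal threshold:

// Illustration only, not compiler code.
#[cold]
fn unlikely_error_path() {
    // Previously weighed against `threshold / 5`; now rejected before costing.
}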
@@ -447,7 +445,7 @@ impl Inliner<'tcx> {
         };
 
         // Copy the arguments if needed.
-        let args: Vec<_> = self.make_call_args(args, &callsite, caller_body);
+        let args: Vec<_> = self.make_call_args(args, &callsite, caller_body, &callee_body);
 
         let mut integrator = Integrator {
             args: &args,
@@ -528,6 +526,7 @@ impl Inliner<'tcx> {
         args: Vec<Operand<'tcx>>,
         callsite: &CallSite<'tcx>,
         caller_body: &mut Body<'tcx>,
+        callee_body: &Body<'tcx>,
     ) -> Vec<Local> {
         let tcx = self.tcx;
 
@@ -554,9 +553,7 @@ impl Inliner<'tcx> {
         // tmp2 = tuple_tmp.2
         //
         // and the vector is `[closure_ref, tmp0, tmp1, tmp2]`.
-        // FIXME(eddyb) make this check for `"rust-call"` ABI combined with
-        // `callee_body.spread_arg == None`, instead of special-casing closures.
-        if tcx.is_closure(callsite.callee.def_id()) {
+        if callsite.fn_sig.abi() == Abi::RustCall && callee_body.spread_arg.is_none() {
             let mut args = args.into_iter();
             let self_ = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
             let tuple = self.create_temp_if_necessary(args.next().unwrap(), callsite, caller_body);
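For context (not part of the patch), the `"rust-call"` ABI used by closure calls passes every argument after the closure itself as a single tuple, which is what the temporaries above unpack; checking `callee_body.spread_arg.is_none()` is also why `callee_body` is now threaded into `make_call_args`. A caller-side illustration:

// Illustration only, not compiler code.
fn call_twice<F: Fn(i32, i32) -> i32>(f: F) -> i32 {
    // Each call conceptually lowers to `Fn::call(&f, (1, 2))`: one closure operand
    // plus one tuple operand, matching the `[closure_ref, tmp0, tmp1, ...]` shape above.
    f(1, 2) + f(3, 4)
}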