/* Structure for saving state for a nested function.
   Copyright (C) 1989-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_FUNCTION_H
#define GCC_FUNCTION_H

#include "hashtab.h"
#include "vec.h"
#include "machmode.h"
#include "tm.h"			/* For CUMULATIVE_ARGS.  */
#include "hard-reg-set.h"	/* For HARD_REG_SET in struct rtl_data.  */
#include "input.h"		/* For location_t.  */

/* Stack of pending (incomplete) sequences saved by `start_sequence'.
   Each element describes one pending sequence.
   The main insn-chain is saved in the last element of the chain,
   unless the chain is empty.  */

struct GTY(()) sequence_stack {
  /* First and last insns in the chain of the saved sequence.  */
  rtx first;
  rtx last;
  struct sequence_stack *next;
};

struct GTY(()) emit_status {
  /* This is reset to LAST_VIRTUAL_REGISTER + 1 at the start of each function.
     After rtl generation, it is 1 plus the largest register number used.  */
  int x_reg_rtx_no;

  /* Lowest label number in current function.  */
  int x_first_label_num;

  /* The ends of the doubly-linked chain of rtl for the current function.
     Both are reset to null at the start of rtl generation for the function.

     start_sequence saves both of these on `sequence_stack' and then starts
     a new, nested sequence of insns.  */
  rtx x_first_insn;
  rtx x_last_insn;

  /* Stack of pending (incomplete) sequences saved by `start_sequence'.
     Each element describes one pending sequence.
     The main insn-chain is saved in the last element of the chain,
     unless the chain is empty.  */
  struct sequence_stack *sequence_stack;

  /* INSN_UID for next insn emitted.
     Reset to 1 for each function compiled.  */
  int x_cur_insn_uid;

  /* INSN_UID for next debug insn emitted.  Only used if
     --param min-nondebug-insn-uid=<value> is given with nonzero value.  */
  int x_cur_debug_insn_uid;

  /* The length of the regno_pointer_align, regno_decl, and x_regno_reg_rtx
     vectors.  Since these vectors are needed during the expansion phase when
     the total number of registers in the function is not yet known, the
     vectors are copied and made bigger when necessary.  */
  int regno_pointer_align_length;

  /* Indexed by pseudo register number, if nonzero gives the known alignment
     for that pseudo (if REG_POINTER is set in x_regno_reg_rtx).
     Allocated in parallel with x_regno_reg_rtx.  */
  unsigned char * GTY((skip)) regno_pointer_align;
};

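/* Illustrative sketch, not part of this interface: the sequence machinery
   recorded in `sequence_stack' above is normally driven through
   start_sequence/get_insns/end_sequence.  Assuming INSN is an
   already-constructed instruction and AFTER is an insn in the main chain,
   a detached sequence is typically built and spliced in roughly like this:

       rtx seq;
       start_sequence ();
       emit_insn (insn);
       seq = get_insns ();
       end_sequence ();
       emit_insn_after (seq, after);

   start_sequence pushes the current first/last insns onto `sequence_stack';
   end_sequence pops them back off.  */
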
/* Indexed by register number, gives an rtx for that register (and only
   that register).  For pseudo registers, it is the unique rtx for
   that pseudo.  For hard registers, it is an rtx of the mode specified
   by reg_raw_mode.

   FIXME: We could put it into emit_status struct, but gengtype is not
   able to deal with length attribute nested in top level structures.  */

extern GTY ((length ("crtl->emit.x_reg_rtx_no"))) rtx * regno_reg_rtx;

/* For backward compatibility... eventually these should all go away.  */
#define reg_rtx_no (crtl->emit.x_reg_rtx_no)
#define seq_stack (crtl->emit.sequence_stack)

#define REGNO_POINTER_ALIGN(REGNO) (crtl->emit.regno_pointer_align[REGNO])

struct GTY(()) expr_status {
  /* Number of units that we should eventually pop off the stack.
     These are the arguments to function calls that have already returned.  */
  int x_pending_stack_adjust;

  /* Under some ABIs, it is the caller's responsibility to pop arguments
     pushed for function calls.  A naive implementation would simply pop
     the arguments immediately after each call.  However, if several
     function calls are made in a row, it is typically cheaper to pop
     all the arguments after all of the calls are complete since a
     single pop instruction can be used.  Therefore, GCC attempts to
     defer popping the arguments until absolutely necessary.  (For
     example, at the end of a conditional, the arguments must be popped,
     since code outside the conditional won't know whether or not the
     arguments need to be popped.)

     When INHIBIT_DEFER_POP is nonzero, however, the compiler does not
     attempt to defer pops.  Instead, the stack is popped immediately
     after each call.  Rather than setting this variable directly, use
     NO_DEFER_POP and OK_DEFER_POP.  */
  int x_inhibit_defer_pop;

  /* If PREFERRED_STACK_BOUNDARY and PUSH_ROUNDING are defined, the stack
     boundary can be momentarily unaligned while pushing the arguments.
     Record the delta since the last aligned boundary here in order to get
     stack alignment in nested function calls working right.  */
  int x_stack_pointer_delta;

  /* Nonzero means __builtin_saveregs has already been done in this function.
     The value is the pseudoreg containing the value __builtin_saveregs
     returned.  */
  rtx x_saveregs_value;

  /* Similarly for __builtin_apply_args.  */
  rtx x_apply_args_value;

  /* List of labels that must never be deleted.  */
  rtx x_forced_labels;
};

typedef struct call_site_record_d *call_site_record;

/* RTL representation of exception handling.  */
struct GTY(()) rtl_eh {
  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  vec<uchar, va_gc> *action_record_data;

  vec<call_site_record, va_gc> *call_site_record_v[2];
};

#define pending_stack_adjust (crtl->expr.x_pending_stack_adjust)
#define inhibit_defer_pop (crtl->expr.x_inhibit_defer_pop)
#define saveregs_value (crtl->expr.x_saveregs_value)
#define apply_args_value (crtl->expr.x_apply_args_value)
#define forced_labels (crtl->expr.x_forced_labels)
#define stack_pointer_delta (crtl->expr.x_stack_pointer_delta)

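/* Illustrative sketch, not part of this interface: the deferred-pop
   mechanism described for x_inhibit_defer_pop above is normally driven
   through the NO_DEFER_POP and OK_DEFER_POP macros (defined in expr.h),
   which increment and decrement inhibit_defer_pop.  Code that must see
   a fully-popped stack typically brackets itself like this:

       NO_DEFER_POP;
       ... emit code that must not see deferred argument pops ...
       OK_DEFER_POP;

   While the counter is nonzero, arguments are popped immediately after
   each call instead of being accumulated in pending_stack_adjust.  */
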
struct gimple_df;
struct temp_slot;
typedef struct temp_slot *temp_slot_p;
struct call_site_record_d;
struct dw_fde_struct;

struct ipa_opt_pass_d;
typedef struct ipa_opt_pass_d *ipa_opt_pass;


struct GTY(()) varasm_status {
  /* If we're using a per-function constant pool, this is it.  */
  struct rtx_constant_pool *pool;

  /* Number of tree-constants deferred during the expansion of this
     function.  */
  unsigned int deferred_constants;
};

/* Information maintained about the RTL representation of incoming
   arguments.  */
struct GTY(()) incoming_args {
  /* Number of bytes of args popped by function being compiled on its return.
     Zero if no bytes are to be popped.
     May affect compilation of return insn or of function epilogue.  */
  int pops_args;

  /* If function's args have a fixed size, this is that size, in bytes.
     Otherwise, it is -1.
     May affect compilation of return insn or of function epilogue.  */
  int size;

  /* # bytes the prologue should push and pretend that the caller pushed them.
     The prologue must do this, but only if parms can be passed in
     registers.  */
  int pretend_args_size;

  /* This is the offset from the arg pointer to the place where the first
     anonymous arg can be found, if there is one.  */
  rtx arg_offset_rtx;

  /* Quantities of various kinds of registers
     used for the current function's args.  */
  CUMULATIVE_ARGS info;

  /* The arg pointer hard register, or the pseudo into which it was copied.  */
  rtx internal_arg_pointer;
};

/* Data for function partitioning.  */
struct GTY(()) function_subsections {
  /* Assembly labels for the hot and cold text sections, to
     be used by debugger functions for determining the size of text
     sections.  */

  const char *hot_section_label;
  const char *cold_section_label;
  const char *hot_section_end_label;
  const char *cold_section_end_label;
};

/* Describe an empty area of space in the stack frame.  These can be chained
   into a list; this is used to keep track of space wasted for alignment
   reasons.  */
struct GTY(()) frame_space
{
  struct frame_space *next;

  HOST_WIDE_INT start;
  HOST_WIDE_INT length;
};

/* Data structures maintained for the currently processed function in RTL
   form.  */
struct GTY(()) rtl_data {
  struct expr_status expr;
  struct emit_status emit;
  struct varasm_status varasm;
  struct incoming_args args;
  struct function_subsections subsections;
  struct rtl_eh eh;

  /* For function.c.  */

  /* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
     defined, the needed space is pushed by the prologue.  */
  int outgoing_args_size;

  /* If nonzero, an RTL expression for the location at which the current
     function returns its result.  If the current function returns its
     result in a register, current_function_return_rtx will always be
     the hard register containing the result.  */
  rtx return_rtx;

  /* Vector of initial-value pairs.  Each pair consists of a pseudo
     register of the appropriate mode that stores the initial value of a
     hard register REGNO, and that hard register itself.  */
  /* ??? This could be a VEC but there is currently no way to define an
     opaque VEC type.  */
  struct initial_value_struct *hard_reg_initial_vals;

  /* A variable living at the top of the frame that holds a known value.
     Used for detecting stack clobbers.  */
  tree stack_protect_guard;

  /* List (chain of EXPR_LISTs) of labels heading the current handlers for
     nonlocal gotos.  */
  rtx x_nonlocal_goto_handler_labels;

  /* Label that will go on the function epilogue.
     Jumping to this label serves as a "return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx x_return_label;

  /* Label that will go on the end of function epilogue.
     Jumping to this label serves as a "naked return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx x_naked_return_label;

  /* List (chain of EXPR_LISTs) of all stack slots in this function.
     Made for the sake of unshare_all_rtl.  */
  rtx x_stack_slot_list;

  /* List of empty areas in the stack frame.  */
  struct frame_space *frame_space_list;

  /* Place after which to insert the tail_recursion_label if we need one.  */
  rtx x_stack_check_probe_note;

  /* Location at which to save the argument pointer if it will need to be
     referenced.  There are two cases where this is done: if nonlocal gotos
     exist, or if vars stored at an offset from the argument pointer will be
     needed by inner routines.  */
  rtx x_arg_pointer_save_area;

  /* Dynamic Realign Argument Pointer used for realigning the stack.  */
  rtx drap_reg;

  /* Offset to end of allocated area of stack frame.
     If stack grows down, this is the address of the last stack slot allocated.
     If stack grows up, this is the address for the next slot.  */
  HOST_WIDE_INT x_frame_offset;

  /* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
  rtx x_parm_birth_insn;

  /* List of all used temporaries allocated, by level.  */
  vec<temp_slot_p, va_gc> *x_used_temp_slots;

  /* List of available temp slots.  */
  struct temp_slot *x_avail_temp_slots;

  /* Current nesting level for temporaries.  */
  int x_temp_slot_level;

  /* The largest alignment needed on the stack, including the requirement
     for outgoing stack alignment.  */
  unsigned int stack_alignment_needed;

  /* Preferred alignment of the end of the stack frame, used when calling
     other functions.  */
  unsigned int preferred_stack_boundary;

  /* The minimum alignment of the parameter stack.  */
  unsigned int parm_stack_boundary;

  /* The largest alignment of any slot allocated on the stack.  */
  unsigned int max_used_stack_slot_alignment;

  /* The stack alignment estimated before reload, with consideration of the
     following factors:
     1. Alignment of local stack variables (max_used_stack_slot_alignment)
     2. Alignment requirement to call other functions
        (preferred_stack_boundary)
     3. Alignment of non-local stack variables that might be spilled to the
        local stack.  */
  unsigned int stack_alignment_estimated;

  /* For reorg.  */

  /* Nonzero if function being compiled called builtin_return_addr or
     builtin_frame_address with nonzero count.  */
  bool accesses_prior_frames;

  /* Nonzero if the function calls __builtin_eh_return.  */
  bool calls_eh_return;

  /* Nonzero if function saves all registers, e.g. if it has a nonlocal
     label that can reach the exit block via non-exceptional paths.  */
  bool saves_all_registers;

  /* Nonzero if function being compiled has nonlocal gotos to parent
     function.  */
  bool has_nonlocal_goto;

  /* Nonzero if function being compiled has an asm statement.  */
  bool has_asm_statement;

  /* This bit is used by the exception handling logic.  It is set if all
     calls (if any) are sibling calls.  Such functions do not have to
     have EH tables generated, as they cannot throw.  A call to such a
     function, however, should be treated as throwing if any of its callees
     can throw.  */
  bool all_throwers_are_sibcalls;

  /* Nonzero if stack limit checking should be enabled in the current
     function.  */
  bool limit_stack;

  /* Nonzero if profiling code should be generated.  */
  bool profile;

  /* Nonzero if the current function uses the constant pool.  */
  bool uses_const_pool;

  /* Nonzero if the current function uses pic_offset_table_rtx.  */
  bool uses_pic_offset_table;

  /* Nonzero if the current function needs an lsda for exception handling.  */
  bool uses_eh_lsda;

  /* Set when the tail call has been produced.  */
  bool tail_call_emit;

  /* Nonzero if code to initialize arg_pointer_save_area has been emitted.  */
  bool arg_pointer_save_area_init;

  /* Nonzero if current function must be given a frame pointer.
     Set in reload1.c or lra-eliminations.c if anything is allocated
     on the stack there.  */
  bool frame_pointer_needed;

  /* When set, expand should optimize for speed.  */
  bool maybe_hot_insn_p;

  /* Nonzero if function stack realignment is needed.  This flag may be
     set twice: before and after reload.  Before reload it is set based
     on the estimated stack alignment; it will be changed after reload
     if the stack realignment criteria are different by then.
     The value set after reload is the accurate one and is finalized.  */
  bool stack_realign_needed;

  /* Nonzero if function stack realignment has been tried.  This flag is set
     only once before reload.  It affects register elimination.  This
     is used to generate DWARF debug info for stack variables.  */
  bool stack_realign_tried;

  /* Nonzero if function being compiled needs a dynamic realigned
     argument pointer (drap) if the stack needs realigning.  */
  bool need_drap;

  /* Nonzero if function stack realignment estimation is done, namely
     the stack_realign_needed flag has been set before reload based on
     the estimated stack alignment info.  */
  bool stack_realign_processed;

  /* Nonzero if function stack realignment has been finalized, namely
     the stack_realign_needed flag has been set and finalized after reload.  */
  bool stack_realign_finalized;

  /* True if dbr_schedule has already been called for this function.  */
  bool dbr_scheduled_p;

  /* True if the current function cannot throw.  Unlike
     TREE_NOTHROW (current_function_decl) it is set even for an overwritable
     function whose currently compiled version is nothrow.  */
  bool nothrow;

  /* True if we performed shrink-wrapping for the current function.  */
  bool shrink_wrapped;

  /* Nonzero if function being compiled doesn't modify the stack pointer
     (ignoring the prologue and epilogue).  This is only valid after
     pass_stack_ptr_mod has run.  */
  bool sp_is_unchanging;

  /* Nonzero if function being compiled doesn't contain any calls
     (ignoring the prologue and epilogue).  This is set prior to
     local register allocation and is valid for the remaining
     compiler passes.  */
  bool is_leaf;

  /* Nonzero if the function being compiled is a leaf function which only
     uses leaf registers.  This is valid after reload (specifically after
     sched2) and is useful only if the port defines LEAF_REGISTERS.  */
  bool uses_only_leaf_regs;

  /* Like regs_ever_live, but 1 if a reg is set or clobbered from an
     asm.  Unlike regs_ever_live, elements of this array corresponding
     to eliminable regs (like the frame pointer) are set if an asm
     sets them.  */
  HARD_REG_SET asm_clobbers;
};

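/* Illustrative sketch, not part of this interface: the frame bookkeeping
   above (x_frame_offset, frame_space_list, the temp slot lists) is normally
   driven through the allocators in function.c rather than touched directly.
   For instance, code needing a scratch stack slot of machine mode MODE
   (a caller-supplied assumption here) would typically do:

       rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);

   which advances frame_offset and, when padding is needed for alignment,
   records the wasted space in frame_space_list.  */
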
#define return_label (crtl->x_return_label)
#define naked_return_label (crtl->x_naked_return_label)
#define stack_slot_list (crtl->x_stack_slot_list)
#define parm_birth_insn (crtl->x_parm_birth_insn)
#define frame_offset (crtl->x_frame_offset)
#define stack_check_probe_note (crtl->x_stack_check_probe_note)
#define arg_pointer_save_area (crtl->x_arg_pointer_save_area)
#define used_temp_slots (crtl->x_used_temp_slots)
#define avail_temp_slots (crtl->x_avail_temp_slots)
#define temp_slot_level (crtl->x_temp_slot_level)
#define nonlocal_goto_handler_labels (crtl->x_nonlocal_goto_handler_labels)
#define frame_pointer_needed (crtl->frame_pointer_needed)
#define stack_realign_fp (crtl->stack_realign_needed && !crtl->need_drap)
#define stack_realign_drap (crtl->stack_realign_needed && crtl->need_drap)

extern GTY(()) struct rtl_data x_rtl;

/* Accessor to RTL data structures.  We keep them statically allocated now
   since we never keep multiple functions.  For a threaded compiler we might,
   however, want to do this differently.  */
#define crtl (&x_rtl)

struct GTY(()) stack_usage
{
  /* # of bytes of static stack space allocated by the function.  */
  HOST_WIDE_INT static_stack_size;

  /* # of bytes of dynamic stack space allocated by the function.  This is
     meaningful only if has_unbounded_dynamic_stack_size is zero.  */
  HOST_WIDE_INT dynamic_stack_size;

  /* # of bytes of space pushed onto the stack after the prologue.  If
     !ACCUMULATE_OUTGOING_ARGS, it contains the outgoing arguments.  */
  int pushed_stack_size;

  /* Nonzero if the amount of stack space allocated dynamically cannot
     be bounded at compile-time.  */
  unsigned int has_unbounded_dynamic_stack_size : 1;
};

#define current_function_static_stack_size (cfun->su->static_stack_size)
#define current_function_dynamic_stack_size (cfun->su->dynamic_stack_size)
#define current_function_pushed_stack_size (cfun->su->pushed_stack_size)
#define current_function_has_unbounded_dynamic_stack_size \
  (cfun->su->has_unbounded_dynamic_stack_size)
#define current_function_allocates_dynamic_stack_space    \
  (current_function_dynamic_stack_size != 0               \
   || current_function_has_unbounded_dynamic_stack_size)

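/* Illustrative sketch, not part of this interface: RTL passes reach the
   per-function RTL state through the crtl accessor defined above, while
   tree- and GIMPLE-level state hangs off cfun (declared further below).
   Both always describe the function currently being compiled, so a pass
   might test, for example:

       if (crtl->calls_eh_return && !cfun->calls_setjmp)
         ...  */
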
/* This structure can save all the important global and static variables
   describing the status of the current function.  */

struct GTY(()) function {
  struct eh_status *eh;

  /* The control flow graph for this function.  */
  struct control_flow_graph *cfg;

  /* GIMPLE body for this function.  */
  gimple_seq gimple_body;

  /* SSA and dataflow information.  */
  struct gimple_df *gimple_df;

  /* The loops in this function.  */
  struct loops *x_current_loops;

  /* The stack usage of this function.  */
  struct stack_usage *su;

  /* Value histograms attached to particular statements.  */
  htab_t GTY((skip)) value_histograms;

  /* For function.c.  */

  /* Points to the FUNCTION_DECL of this function.  */
  tree decl;

  /* A PARM_DECL that should contain the static chain for this function.
     It will be initialized at the beginning of the function.  */
  tree static_chain_decl;

  /* An expression that contains the non-local goto save area.  The first
     word is the saved frame pointer and the second is the saved stack
     pointer.  */
  tree nonlocal_goto_save_area;

  /* Vector of function local variables, functions, types and constants.  */
  vec<tree, va_gc> *local_decls;

  /* For md files.  */

  /* tm.h can use this to store whatever it likes.  */
  struct machine_function * GTY ((maybe_undef)) machine;

  /* Language-specific code can use this to store whatever it likes.  */
  struct language_function * language;

  /* Used types hash table.  */
  htab_t GTY ((param_is (union tree_node))) used_types_hash;

  /* Dwarf2 Frame Description Entry, containing the Call Frame Instructions
     used for unwinding.  Only set when either dwarf2 unwinding or dwarf2
     debugging is enabled.  */
  struct dw_fde_struct *fde;

  /* Last statement uid.  */
  int last_stmt_uid;

  /* Function sequence number for profiling, debugging, etc.  */
  int funcdef_no;

  /* Line number of the start of the function for debugging purposes.  */
  location_t function_start_locus;

  /* Line number of the end of the function.  */
  location_t function_end_locus;

  /* Properties used by the pass manager.  */
  unsigned int curr_properties;
  unsigned int last_verified;

  /* Non-null if the function does something that would prevent it from
     being copied; this applies to both versioning and inlining.  Set to
     a string describing the reason for failure.  */
  const char * GTY((skip)) cannot_be_copied_reason;

  /* Collected bit flags.  */

  /* Number of units of general registers that need saving in a stdarg
     function.  The unit depends on the backend: it is either a number of
     bytes or a number of registers.  */
  unsigned int va_list_gpr_size : 8;

  /* Number of units of floating point registers that need saving in a
     stdarg function.  */
  unsigned int va_list_fpr_size : 8;

  /* Nonzero if function being compiled can call setjmp.  */
  unsigned int calls_setjmp : 1;

  /* Nonzero if function being compiled can call alloca,
     either as a subroutine or builtin.  */
  unsigned int calls_alloca : 1;

  /* Nonzero if function being compiled receives nonlocal gotos
     from nested functions.  */
  unsigned int has_nonlocal_label : 1;

  /* Nonzero if we've set cannot_be_copied_reason.  I.e. if
     (cannot_be_copied_set && !cannot_be_copied_reason), the function
     can in fact be copied.  */
  unsigned int cannot_be_copied_set : 1;

  /* Nonzero if current function uses stdarg.h or equivalent.  */
  unsigned int stdarg : 1;

  unsigned int after_inlining : 1;
  unsigned int always_inline_functions_inlined : 1;

  /* Nonzero if function being compiled can throw synchronous non-call
     exceptions.  */
  unsigned int can_throw_non_call_exceptions : 1;

  /* Nonzero if instructions that may throw exceptions but don't otherwise
     contribute to the execution of the program can be deleted.  */
  unsigned int can_delete_dead_exceptions : 1;

  /* Fields below this point are not set for abstract functions; see
     allocate_struct_function.  */

  /* Nonzero if function being compiled needs to be given an address
     where the value should be stored.  */
  unsigned int returns_struct : 1;

  /* Nonzero if function being compiled needs to
     return the address of where it has put a structure value.  */
  unsigned int returns_pcc_struct : 1;

  /* Nonzero if this function has local DECL_HARD_REGISTER variables.
     In this case code motion has to be done more carefully.  */
  unsigned int has_local_explicit_reg_vars : 1;

  /* Nonzero if the current function is a thunk, i.e., a lightweight
     function (implemented by the output_mi_thunk hook) that just
     adjusts one of its arguments and forwards to another
     function.  */
  unsigned int is_thunk : 1;
};

/* Add the decl D to the local_decls list of FUN.  */

static inline void
add_local_decl (struct function *fun, tree d)
{
  vec_safe_push (fun->local_decls, d);
}

#define FOR_EACH_LOCAL_DECL(FUN, I, D) \
  FOR_EACH_VEC_SAFE_ELT_REVERSE ((FUN)->local_decls, I, D)

/* If va_list_[gf]pr_size is set to this, it means we don't know how
   many units need to be saved.  */
#define VA_LIST_MAX_GPR_SIZE  255
#define VA_LIST_MAX_FPR_SIZE  255

/* The function currently being compiled.  */
extern GTY(()) struct function *cfun;

/* In order to ensure that cfun is not set directly, we redefine it so
   that it is not an lvalue.  Rather than assign to cfun, use
   push_cfun or set_cfun.  */
#define cfun (cfun + 0)

/* Nonzero if we've already converted virtual regs to hard regs.  */
extern int virtuals_instantiated;

/* Nonzero if at least one trampoline has been created.  */
extern int trampolines_created;

struct GTY(()) types_used_by_vars_entry {
  tree type;
  tree var_decl;
};

/* Hash table recording the relationship between a global variable
   and the types it references in its initializer.  The key of each
   entry is a referenced type, and the value is the DECL of the global
   variable.  types_used_by_vars_do_hash and types_used_by_vars_eq below
   are the hash and equality functions to use for this hash table.  */
extern GTY((param_is (struct types_used_by_vars_entry))) htab_t
  types_used_by_vars_hash;

hashval_t types_used_by_vars_do_hash (const void*);
int types_used_by_vars_eq (const void *, const void *);
void types_used_by_var_decl_insert (tree type, tree var_decl);

/* During parsing of a global variable, this vector contains the types
   referenced by the global variable.  */
extern GTY(()) vec<tree, va_gc> *types_used_by_cur_var_decl;

/* cfun shouldn't be set directly; use one of these functions instead.  */
extern void set_cfun (struct function *new_cfun);
extern void push_cfun (struct function *new_cfun);
extern void pop_cfun (void);
extern void instantiate_decl_rtl (rtx x);

/* For backward compatibility... eventually these should all go away.  */
#define current_function_funcdef_no (cfun->funcdef_no)

#define current_loops (cfun->x_current_loops)
#define dom_computed (cfun->cfg->x_dom_computed)
#define n_bbs_in_dom_tree (cfun->cfg->x_n_bbs_in_dom_tree)
#define VALUE_HISTOGRAMS(fun) (fun)->value_histograms

/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
extern void reorder_blocks (void);

/* Set BLOCK_NUMBER for all the blocks in FN.  */
extern void number_blocks (tree);

extern void clear_block_marks (tree);
extern tree blocks_nreverse (tree);
extern tree block_chainon (tree, tree);

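/* Illustrative sketch, not part of this interface: cfun is switched with
   push_cfun/pop_cfun (declared above) rather than assigned, and the
   local_decls vector is walked with FOR_EACH_LOCAL_DECL.  Assuming FNDECL
   is some FUNCTION_DECL, a pass visiting its local declarations would
   typically look like:

       unsigned i;
       tree decl;

       push_cfun (DECL_STRUCT_FUNCTION (fndecl));
       FOR_EACH_LOCAL_DECL (cfun, i, decl)
         {
           ... process decl ...
         }
       pop_cfun ();  */
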
/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */
extern HOST_WIDE_INT get_frame_size (void);

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */
extern bool frame_offset_overflow (HOST_WIDE_INT, tree);

/* A pointer to a function to create target specific, per-function
   data structures.  */
extern struct machine_function * (*init_machine_status) (void);

/* Save and restore status information for a nested function.  */
extern void free_after_parsing (struct function *);
extern void free_after_compilation (struct function *);

extern void init_varasm_status (void);

#ifdef RTX_CODE
extern void diddle_return_value (void (*)(rtx, void*), void*);
extern void clobber_return_register (void);
#endif

extern rtx get_arg_pointer_save_area (void);

/* Returns the name of the current function.  */
extern const char *fndecl_name (tree);
extern const char *function_name (struct function *);
extern const char *current_function_name (void);

extern void do_warn_unused_parameter (tree);

extern bool pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
                               tree, bool);
extern bool reference_callee_copied (CUMULATIVE_ARGS *, enum machine_mode,
                                     tree, bool);

extern void used_types_insert (tree);

extern int get_next_funcdef_no (void);
extern int get_last_funcdef_no (void);

#ifdef HAVE_simple_return
extern bool requires_stack_frame_p (rtx, HARD_REG_SET, HARD_REG_SET);
#endif

extern rtx get_hard_reg_initial_val (enum machine_mode, unsigned int);
extern rtx has_hard_reg_initial_val (enum machine_mode, unsigned int);
extern rtx get_hard_reg_initial_reg (rtx);
extern bool initial_value_entry (int i, rtx *, rtx *);

/* Called from gimple_expand_cfg.  */
extern unsigned int emit_initial_value_sets (void);

/* In predict.c */
extern bool optimize_function_for_size_p (struct function *);
extern bool optimize_function_for_speed_p (struct function *);

#endif  /* GCC_FUNCTION_H */