/* Structure for saving state for a nested function.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#ifndef GCC_FUNCTION_H
#define GCC_FUNCTION_H

#include "tree.h"
#include "hashtab.h"
#include "varray.h"

/* Stack of pending (incomplete) sequences saved by `start_sequence'.
   Each element describes one pending sequence.
   The main insn-chain is saved in the last element of the chain,
   unless the chain is empty.  */

struct sequence_stack GTY(())
{
  /* First and last insns in the chain of the saved sequence.  */
  rtx first;
  rtx last;
  struct sequence_stack *next;
};

struct emit_status GTY(())
{
  /* This is reset to LAST_VIRTUAL_REGISTER + 1 at the start of each function.
     After rtl generation, it is 1 plus the largest register number used.  */
  int x_reg_rtx_no;

  /* Lowest label number in current function.  */
  int x_first_label_num;

  /* The ends of the doubly-linked chain of rtl for the current function.
     Both are reset to null at the start of rtl generation for the function.

     start_sequence saves both of these on `sequence_stack' and then starts
     a new, nested sequence of insns.  */
  rtx x_first_insn;
  rtx x_last_insn;

  /* Stack of pending (incomplete) sequences saved by `start_sequence'.
     Each element describes one pending sequence.
     The main insn-chain is saved in the last element of the chain,
     unless the chain is empty.  */
  struct sequence_stack *sequence_stack;

  /* INSN_UID for next insn emitted.
     Reset to 1 for each function compiled.  */
  int x_cur_insn_uid;

  /* Location of the last line-number NOTE emitted.
     This is used to avoid generating duplicates.  */
  location_t x_last_location;

  /* The length of the regno_pointer_align, regno_decl, and x_regno_reg_rtx
     vectors.  Since these vectors are needed during the expansion phase when
     the total number of registers in the function is not yet known, the
     vectors are copied and made bigger when necessary.  */
  int regno_pointer_align_length;

  /* Indexed by pseudo register number, if nonzero gives the known alignment
     for that pseudo (if REG_POINTER is set in x_regno_reg_rtx).
     Allocated in parallel with x_regno_reg_rtx.  */
  unsigned char * GTY((skip)) regno_pointer_align;
};
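
/* Illustrative sketch (not part of this interface): the sequence stack
   described above is what makes nested RTL generation work.  Code that
   needs to build insns on the side typically uses the emit-rtl routines
   declared in rtl.h in roughly this pattern:

     rtx seq;

     start_sequence ();            // save x_first_insn/x_last_insn, start fresh
     emit_move_insn (target, src); // insns accumulate in the nested sequence
     seq = get_insns ();           // grab the nested chain
     end_sequence ();              // pop, restoring the saved main chain
     emit_insn_before (seq, insn); // splice it back where it is wanted

   The particular emit calls are only an example of how the saved
   first/last insn pointers are pushed and popped.  */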

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able
   to deal with length attribute nested in top level structures.  */

extern GTY ((length ("crtl->emit.x_reg_rtx_no"))) rtx * regno_reg_rtx;

/* For backward compatibility... eventually these should all go away.  */
#define reg_rtx_no (crtl->emit.x_reg_rtx_no)
#define seq_stack (crtl->emit.sequence_stack)

#define REGNO_POINTER_ALIGN(REGNO) (crtl->emit.regno_pointer_align[REGNO])

struct expr_status GTY(())
{
  /* Number of units that we should eventually pop off the stack.
     These are the arguments to function calls that have already returned.  */
  int x_pending_stack_adjust;

  /* Under some ABIs, it is the caller's responsibility to pop arguments
     pushed for function calls.  A naive implementation would simply pop
     the arguments immediately after each call.  However, if several
     function calls are made in a row, it is typically cheaper to pop
     all the arguments after all of the calls are complete since a
     single pop instruction can be used.  Therefore, GCC attempts to
     defer popping the arguments until absolutely necessary.  (For
     example, at the end of a conditional, the arguments must be popped,
     since code outside the conditional won't know whether or not the
     arguments need to be popped.)

     When INHIBIT_DEFER_POP is nonzero, however, the compiler does not
     attempt to defer pops.  Instead, the stack is popped immediately
     after each call.  Rather than setting this variable directly, use
     NO_DEFER_POP and OK_DEFER_POP.  */
  int x_inhibit_defer_pop;

  /* If PREFERRED_STACK_BOUNDARY and PUSH_ROUNDING are defined, the stack
     boundary can be momentarily unaligned while pushing the arguments.
     Record the delta since last aligned boundary here in order to get
     stack alignment in the nested function calls working right.  */
  int x_stack_pointer_delta;

  /* Nonzero means __builtin_saveregs has already been done in this function.
     The value is the pseudoreg containing the value __builtin_saveregs
     returned.  */
  rtx x_saveregs_value;

  /* Similarly for __builtin_apply_args.  */
  rtx x_apply_args_value;

  /* List of labels that must never be deleted.  */
  rtx x_forced_labels;
};
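
/* Illustrative example (not part of this interface) of the deferred
   popping described for x_inhibit_defer_pop above: two back-to-back
   calls that each push 8 bytes of arguments accumulate
   x_pending_stack_adjust = 16 and emit a single adjustment,

       push args; call f      ; pending stack adjust  = 8
       push args; call g      ; pending stack adjust += 8
       add  sp, 16            ; one deferred pop instead of two

   rather than an "add sp, 8" after each call.  Around regions such as
   conditionals, NO_DEFER_POP and OK_DEFER_POP are used to bracket the
   code so the pending arguments are popped before control flow merges
   again.  */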

typedef struct call_site_record *call_site_record;
DEF_VEC_P(call_site_record);
DEF_VEC_ALLOC_P(call_site_record, gc);

/* RTL representation of exception handling.  */
struct rtl_eh GTY(())
{
  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;

  htab_t GTY ((param_is (struct ehl_map_entry))) exception_handler_label_map;

  VEC(tree,gc) *ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  VEC(call_site_record,gc) *call_site_record;
};

#define pending_stack_adjust (crtl->expr.x_pending_stack_adjust)
#define inhibit_defer_pop (crtl->expr.x_inhibit_defer_pop)
#define saveregs_value (crtl->expr.x_saveregs_value)
#define apply_args_value (crtl->expr.x_apply_args_value)
#define forced_labels (crtl->expr.x_forced_labels)
#define stack_pointer_delta (crtl->expr.x_stack_pointer_delta)

struct gimple_df;
struct temp_slot;
typedef struct temp_slot *temp_slot_p;
struct call_site_record;

DEF_VEC_P(temp_slot_p);
DEF_VEC_ALLOC_P(temp_slot_p,gc);
struct ipa_opt_pass;
typedef struct ipa_opt_pass *ipa_opt_pass;

DEF_VEC_P(ipa_opt_pass);
DEF_VEC_ALLOC_P(ipa_opt_pass,heap);

enum function_frequency {
  /* This function most likely won't be executed at all.
     (set only when profile feedback is available or via function attribute).  */
  FUNCTION_FREQUENCY_UNLIKELY_EXECUTED,
  /* The default value.  */
  FUNCTION_FREQUENCY_NORMAL,
  /* Optimize this function hard
     (set only when profile feedback is available or via function attribute).  */
  FUNCTION_FREQUENCY_HOT
};

struct varasm_status GTY(())
{
  /* If we're using a per-function constant pool, this is it.  */
  struct rtx_constant_pool *pool;

  /* Number of tree-constants deferred during the expansion of this
     function.  */
  unsigned int deferred_constants;
};

/* Information maintained about the RTL representation of incoming
   arguments.  */
struct incoming_args GTY(())
{
  /* Number of bytes of args popped by function being compiled on its return.
     Zero if no bytes are to be popped.
     May affect compilation of return insn or of function epilogue.  */
  int pops_args;

  /* If function's args have a fixed size, this is that size, in bytes.
     Otherwise, it is -1.
     May affect compilation of return insn or of function epilogue.  */
  int size;

  /* # bytes the prologue should push and pretend that the caller pushed them.
     The prologue must do this, but only if parms can be passed in
     registers.  */
  int pretend_args_size;

  /* This is the offset from the arg pointer to the place where the first
     anonymous arg can be found, if there is one.  */
  rtx arg_offset_rtx;

  /* Quantities of various kinds of registers
     used for the current function's args.  */
  CUMULATIVE_ARGS info;

  /* The arg pointer hard register, or the pseudo into which it was copied.  */
  rtx internal_arg_pointer;
};

/* Data for function partitioning.  */
struct function_subsections GTY(())
{
  /* Assembly labels for the hot and cold text sections, to
     be used by debugger functions for determining the size of text
     sections.  */

  const char *hot_section_label;
  const char *cold_section_label;
  const char *hot_section_end_label;
  const char *cold_section_end_label;

  /* String to be used for the name of cold text sections, via
     targetm.asm_out.named_section.  */

  const char *unlikely_text_section_name;
};

/* Data structures maintained for the currently processed function in
   RTL form.  */
struct rtl_data GTY(())
{
  struct expr_status expr;
  struct emit_status emit;
  struct varasm_status varasm;
  struct incoming_args args;
  struct function_subsections subsections;
  struct rtl_eh eh;

  /* For function.c.  */

  /* # of bytes of outgoing arguments.  If ACCUMULATE_OUTGOING_ARGS is
     defined, the needed space is pushed by the prologue.  */
  int outgoing_args_size;

  /* If nonzero, an RTL expression for the location at which the current
     function returns its result.  If the current function returns its
     result in a register, current_function_return_rtx will always be
     the hard register containing the result.  */
  rtx return_rtx;

  /* Opaque pointer used by get_hard_reg_initial_val and
     has_hard_reg_initial_val (see integrate.[hc]).  */
  struct initial_value_struct *hard_reg_initial_vals;

  /* A variable living at the top of the frame that holds a known value.
     Used for detecting stack clobbers.  */
  tree stack_protect_guard;

  /* List (chain of EXPR_LIST) of labels heading the current handlers for
     nonlocal gotos.  */
  rtx x_nonlocal_goto_handler_labels;
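
  /* Illustrative note (not part of this structure): conceptually, with
     -fstack-protector the guard slot referenced by stack_protect_guard
     above is used roughly like

         frame.guard = __stack_chk_guard;         // prologue
         ... body of the function ...
         if (frame.guard != __stack_chk_guard)    // epilogue
           __stack_chk_fail ();

     The symbol names here are the conventional libc ones and are shown
     only as an example of how a clobber of the slot is detected.  */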

  /* Label that will go on the function epilogue.
     Jumping to this label serves as a "return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx x_return_label;

  /* Label that will go on the end of function epilogue.
     Jumping to this label serves as a "naked return" instruction
     on machines which require execution of the epilogue on all returns.  */
  rtx x_naked_return_label;

  /* List (chain of EXPR_LISTs) of all stack slots in this function.
     Made for the sake of unshare_all_rtl.  */
  rtx x_stack_slot_list;

  /* Place after which to insert the tail_recursion_label if we need one.  */
  rtx x_stack_check_probe_note;

  /* Location at which to save the argument pointer if it will need to be
     referenced.  There are two cases where this is done: if nonlocal gotos
     exist, or if vars stored at an offset from the argument pointer will be
     needed by inner routines.  */
  rtx x_arg_pointer_save_area;

  /* Dynamic Realign Argument Pointer used for realigning stack.  */
  rtx drap_reg;

  /* Offset to end of allocated area of stack frame.
     If stack grows down, this is the address of the last stack slot allocated.
     If stack grows up, this is the address for the next slot.  */
  HOST_WIDE_INT x_frame_offset;

  /* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
  rtx x_parm_birth_insn;

  /* List of all used temporaries allocated, by level.  */
  VEC(temp_slot_p,gc) *x_used_temp_slots;

  /* List of available temp slots.  */
  struct temp_slot *x_avail_temp_slots;

  /* Current nesting level for temporaries.  */
  int x_temp_slot_level;

  /* The largest alignment needed on the stack, including requirement
     for outgoing stack alignment.  */
  unsigned int stack_alignment_needed;

  /* Preferred alignment of the end of the stack frame, used when calling
     other functions.  */
  unsigned int preferred_stack_boundary;

  /* The minimum alignment of the parameter stack.  */
  unsigned int parm_stack_boundary;

  /* The largest alignment of a slot allocated on the stack.  */
  unsigned int max_used_stack_slot_alignment;

  /* The stack alignment estimated before reload, with consideration of
     the following factors:
     1. Alignment of local stack variables (max_used_stack_slot_alignment)
     2. Alignment requirement to call other functions
        (preferred_stack_boundary)
     3. Alignment of non-local stack variables that might be spilled to
        the local stack.  */
  unsigned int stack_alignment_estimated;

  /* For reorg.  */

  /* If some insns can be deferred to the delay slots of the epilogue, the
     delay list for them is recorded here.  */
  rtx epilogue_delay_list;

  /* Nonzero if function being compiled called builtin_return_addr or
     builtin_frame_address with nonzero count.  */
  bool accesses_prior_frames;

  /* Nonzero if the function calls __builtin_eh_return.  */
  bool calls_eh_return;

  /* Nonzero if function saves all registers, e.g. if it has a nonlocal
     label that can reach the exit block via non-exceptional paths.  */
  bool saves_all_registers;

  /* Nonzero if function being compiled has nonlocal gotos to parent
     function.  */
  bool has_nonlocal_goto;

  /* Nonzero if function being compiled has an asm statement.  */
  bool has_asm_statement;
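
  /* Worked example (illustrative only) for the alignment fields above:
     if a local variable is declared with __attribute__((aligned (32)))
     while the ABI and the function's calls only require 16-byte
     alignment, then max_used_stack_slot_alignment records the 32-byte
     requirement, stack_alignment_estimated becomes the maximum of the
     two (32 bytes), and, because the incoming stack is only guaranteed
     to be 16-byte aligned in this scenario, the function will need
     stack realignment (or a DRAP register, see need_drap below).  */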

  /* This bit is used by the exception handling logic.  It is set if all
     calls (if any) are sibling calls.  Such functions do not have to
     have EH tables generated, as they cannot throw.  A call to such a
     function, however, should be treated as throwing if any of its callees
     can throw.  */
  bool all_throwers_are_sibcalls;

  /* Nonzero if stack limit checking should be enabled in the current
     function.  */
  bool limit_stack;

  /* Nonzero if profiling code should be generated.  */
  bool profile;

  /* Nonzero if the current function uses the constant pool.  */
  bool uses_const_pool;

  /* Nonzero if the current function uses pic_offset_table_rtx.  */
  bool uses_pic_offset_table;

  /* Nonzero if the current function needs an lsda for exception handling.  */
  bool uses_eh_lsda;

  /* Set when the tail call has been produced.  */
  bool tail_call_emit;

  /* Nonzero if code to initialize arg_pointer_save_area has been emitted.  */
  bool arg_pointer_save_area_init;

  /* Nonzero if current function must be given a frame pointer.
     Set in global.c if anything is allocated on the stack there.  */
  bool frame_pointer_needed;

  /* When set, expand should optimize for speed.  */
  bool maybe_hot_insn_p;

  /* Nonzero if function stack realignment is needed.  This flag may be
     set twice: before and after reload.  Before reload it is set based
     on the estimated stack alignment; after reload it is updated if the
     stack realignment criteria have changed by then.  The value set
     after reload is the accurate one and is finalized.  */
  bool stack_realign_needed;

  /* Nonzero if function stack realignment is tried.  This flag is set
     only once before reload.  It affects register elimination.  This
     is used to generate DWARF debug info for stack variables.  */
  bool stack_realign_tried;

  /* Nonzero if function being compiled needs a dynamic realigned
     argument pointer (DRAP) if the stack needs realigning.  */
  bool need_drap;

  /* Nonzero if function stack realignment estimation is done, namely
     the stack_realign_needed flag has been set before reload based on
     the estimated stack alignment info.  */
  bool stack_realign_processed;

  /* Nonzero if function stack realignment has been finalized, namely
     the stack_realign_needed flag has been set and finalized after
     reload.  */
  bool stack_realign_finalized;

  /* True if dbr_schedule has already been called for this function.  */
  bool dbr_scheduled_p;
};

#define return_label (crtl->x_return_label)
#define naked_return_label (crtl->x_naked_return_label)
#define stack_slot_list (crtl->x_stack_slot_list)
#define parm_birth_insn (crtl->x_parm_birth_insn)
#define frame_offset (crtl->x_frame_offset)
#define stack_check_probe_note (crtl->x_stack_check_probe_note)
#define arg_pointer_save_area (crtl->x_arg_pointer_save_area)
#define used_temp_slots (crtl->x_used_temp_slots)
#define avail_temp_slots (crtl->x_avail_temp_slots)
#define temp_slot_level (crtl->x_temp_slot_level)
#define nonlocal_goto_handler_labels (crtl->x_nonlocal_goto_handler_labels)
#define frame_pointer_needed (crtl->frame_pointer_needed)
#define stack_realign_fp (crtl->stack_realign_needed && !crtl->need_drap)
#define stack_realign_drap (crtl->stack_realign_needed && crtl->need_drap)

extern GTY(()) struct rtl_data x_rtl;

/* Accessor to RTL datastructures.  We keep them statically allocated now
   since we never keep multiple functions.  For a threaded compiler we
   might however want to do this differently.  */
#define crtl (&x_rtl)
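
/* For example, the backward-compatibility macro frame_offset defined
   above expands step by step as

     frame_offset  =>  (crtl->x_frame_offset)  =>  ((&x_rtl)->x_frame_offset)

   so every one of these accessors ultimately reads a field of the single
   statically allocated x_rtl instance for the function being compiled.  */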

/* This structure can save all the important global and static variables
   describing the status of the current function.  */

struct function GTY(())
{
  struct eh_status *eh;

  /* The control flow graph for this function.  */
  struct control_flow_graph *cfg;

  /* GIMPLE body for this function.  */
  struct gimple_seq_d *gimple_body;

  /* SSA and dataflow information.  */
  struct gimple_df *gimple_df;

  /* The loops in this function.  */
  struct loops *x_current_loops;

  /* Value histograms attached to particular statements.  */
  htab_t GTY((skip)) value_histograms;

  /* For function.c.  */

  /* Points to the FUNCTION_DECL of this function.  */
  tree decl;

  /* A PARM_DECL that should contain the static chain for this function.
     It will be initialized at the beginning of the function.  */
  tree static_chain_decl;

  /* An expression that contains the non-local goto save area.  The first
     word is the saved frame pointer and the second is the saved stack
     pointer.  */
  tree nonlocal_goto_save_area;

  /* Function's module id.  */
  unsigned module_id;

  /* Function sequence number for profiling, debugging, etc.  */
  int funcdef_no;

  /* List of function local variables, functions, types and constants.  */
  tree local_decls;

  /* For md files.  */

  /* tm.h can use this to store whatever it likes.  */
  struct machine_function * GTY ((maybe_undef)) machine;

  /* Language-specific code can use this to store whatever it likes.  */
  struct language_function * language;

  /* Used types hash table.  */
  htab_t GTY ((param_is (union tree_node))) used_types_hash;

  /* Last statement uid.  */
  int last_stmt_uid;

  /* Line number of the start of the function for debugging purposes.  */
  location_t function_start_locus;

  /* Line number of the end of the function.  */
  location_t function_end_locus;

  /* Properties used by the pass manager.  */
  unsigned int curr_properties;
  unsigned int last_verified;

  /* Interprocedural passes scheduled to have their transform functions
     applied next time we execute local pass on them.  We maintain it
     per-function in order to allow IPA passes to introduce new functions.  */
  VEC(ipa_opt_pass,heap) * GTY((skip)) ipa_transforms_to_apply;

  /* Collected bit flags.  */

  /* Number of units of general registers that need saving in a stdarg
     function.  What a unit is depends on the backend: it is either a
     number of bytes or a number of registers.  */
  unsigned int va_list_gpr_size : 8;

  /* Number of units of floating point registers that need saving in a
     stdarg function.  */
  unsigned int va_list_fpr_size : 8;

  /* How commonly executed the function is.  Initialized during branch
     probabilities pass.  */
  ENUM_BITFIELD (function_frequency) function_frequency : 2;

  /* Nonzero if function being compiled can call setjmp.  */
  unsigned int calls_setjmp : 1;

  /* Nonzero if function being compiled can call alloca,
     either as a subroutine or builtin.  */
  unsigned int calls_alloca : 1;

  /* Nonzero if function being compiled receives nonlocal gotos
     from nested functions.  */
  unsigned int has_nonlocal_label : 1;

  /* Nonzero if current function uses stdarg.h or equivalent.  */
  unsigned int stdarg : 1;

  /* Nonzero if the back-end should not keep track of expressions that
     determine the size of variable-sized objects.  Normally, such
     expressions are saved away, and then expanded when the next
     function is started.  For example, if a parameter has a
     variable-sized type, then the size of the parameter is computed
     when the function body is entered.  However, some front-ends do
     not desire this behavior.  */
  unsigned int dont_save_pending_sizes_p : 1;

  unsigned int after_inlining : 1;
  unsigned int always_inline_functions_inlined : 1;

  /* Fields below this point are not set for abstract functions; see
     allocate_struct_function.  */

  /* Nonzero if function being compiled needs to be given an address
     where the value should be stored.  */
  unsigned int returns_struct : 1;

  /* Nonzero if function being compiled needs to
     return the address of where it has put a structure value.  */
  unsigned int returns_pcc_struct : 1;

  /* Nonzero if pass_tree_profile was run on this function.  */
  unsigned int after_tree_profile : 1;

  /* Nonzero if this function has local DECL_HARD_REGISTER variables.
     In this case code motion has to be done more carefully.  */
  unsigned int has_local_explicit_reg_vars : 1;

  /* Nonzero if the current function is a thunk, i.e., a lightweight
     function (implemented by the output_mi_thunk hook) that just
     adjusts one of its arguments and forwards to another
     function.  */
  unsigned int is_thunk : 1;
};

/* The bit width of the function id in the global function id used
   in LIPO.  */
#define FUNC_ID_WIDTH (HOST_BITS_PER_WIDEST_INT / 2)
/* The mask to extract the function id from the global function id.  */
#define FUNC_ID_MASK ((1ll << FUNC_ID_WIDTH) - 1)
/* Macro to extract the module id from global function id GID.  */
#define EXTRACT_MODULE_ID_FROM_GLOBAL_ID(gid) \
  (unsigned)(((gid) >> FUNC_ID_WIDTH) & FUNC_ID_MASK)
/* Macro to extract the function id from global function id GID.  */
#define EXTRACT_FUNC_ID_FROM_GLOBAL_ID(gid) (unsigned)((gid) & FUNC_ID_MASK)
/* Macro to generate a global function id from module id M and
   function id F.  */
#define GEN_FUNC_GLOBAL_ID(m,f) \
  ((((HOST_WIDEST_INT) (m)) << FUNC_ID_WIDTH) | (f))
/* Access macro for the module_id field of function FUNC.  */
#define FUNC_DECL_MODULE_ID(func) ((func)->module_id)
/* Access macro for the funcdef_no field of function FUNC.  */
#define FUNC_DECL_FUNC_ID(func) ((func)->funcdef_no + 1)
/* Macro to compute the global function id for FUNC.  */
#define FUNC_DECL_GLOBAL_ID(func) \
  GEN_FUNC_GLOBAL_ID (FUNC_DECL_MODULE_ID (func), FUNC_DECL_FUNC_ID (func))
/* 32-bit wide unique id used for the asm label (limit: 30k modules,
   128k funcs per module).  */
#define FUNC_LABEL_ID(func) \
  ((FUNC_DECL_MODULE_ID (func) << 17) + (func)->funcdef_no)
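
/* Worked example (illustrative; assumes a 64-bit HOST_WIDEST_INT, so
   FUNC_ID_WIDTH is 32): for a function with module_id 3 and
   funcdef_no 41,

     FUNC_DECL_FUNC_ID    = 41 + 1          = 42
     FUNC_DECL_GLOBAL_ID  = (3 << 32) | 42  = 0x30000002a
     EXTRACT_MODULE_ID_FROM_GLOBAL_ID (0x30000002a) = 3
     EXTRACT_FUNC_ID_FROM_GLOBAL_ID   (0x30000002a) = 42
     FUNC_LABEL_ID        = (3 << 17) + 41  = 393257

   The 17-bit shift in FUNC_LABEL_ID is what gives the roughly 128k
   functions per module and 30k modules limits mentioned above.  */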

/* If va_list_[gf]pr_size is set to this, it means we don't know how
   many units need to be saved.  */
#define VA_LIST_MAX_GPR_SIZE 255
#define VA_LIST_MAX_FPR_SIZE 255

/* The function currently being compiled.  */
extern GTY(()) struct function *cfun;

/* In order to ensure that cfun is not set directly, we redefine it so
   that it is not an lvalue.  Rather than assign to cfun, use
   push_cfun or set_cfun.  */
#define cfun (cfun + 0)
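
/* Because of the non-lvalue trick above, "cfun = f;" does not compile;
   passes switch functions with the helpers declared below.  A typical
   (illustrative) usage pattern is:

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ... examine or modify cfun ...
     pop_cfun ();

   where DECL_STRUCT_FUNCTION is the tree.h accessor that yields the
   struct function of a FUNCTION_DECL.  */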

/* Nonzero if we've already converted virtual regs to hard regs.  */
extern int virtuals_instantiated;

/* Nonzero if at least one trampoline has been created.  */
extern int trampolines_created;

/* cfun shouldn't be set directly; use one of these functions instead.  */
extern void set_cfun (struct function *new_cfun);
extern void push_cfun (struct function *new_cfun);
extern void pop_cfun (void);
extern void instantiate_decl_rtl (rtx x);

/* For backward compatibility... eventually these should all go away.  */
#define current_function_funcdef_no (cfun->funcdef_no)

#define current_loops (cfun->x_current_loops)
#define dom_computed (cfun->cfg->x_dom_computed)
#define n_bbs_in_dom_tree (cfun->cfg->x_n_bbs_in_dom_tree)
#define VALUE_HISTOGRAMS(fun) (fun)->value_histograms

/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  */
extern void reorder_blocks (void);

/* Set BLOCK_NUMBER for all the blocks in FN.  */
extern void number_blocks (tree);

extern void clear_block_marks (tree);
extern tree blocks_nreverse (tree);

/* Return size needed for stack frame based on slots so far allocated.
   This size counts from zero.  It is not rounded to STACK_BOUNDARY;
   the caller may have to do that.  */
extern HOST_WIDE_INT get_frame_size (void);

/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetic for function FUNC.  Otherwise
   return FALSE.  */
extern bool frame_offset_overflow (HOST_WIDE_INT, tree);

/* A pointer to a function to create target specific, per-function
   data structures.  */
extern struct machine_function * (*init_machine_status) (void);

/* Save and restore status information for a nested function.  */
extern void free_after_parsing (struct function *);
extern void free_after_compilation (struct function *);

extern void init_varasm_status (void);

#ifdef RTX_CODE
extern void diddle_return_value (void (*)(rtx, void*), void*);
extern void clobber_return_register (void);
#endif

extern rtx get_arg_pointer_save_area (void);

/* Returns the name of the current function.  */
extern const char *current_function_name (void);
/* Returns the assembler name (raw, mangled) of the current function.  */
extern const char *current_function_assembler_name (void);

extern void do_warn_unused_parameter (tree);

extern bool pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
                               tree, bool);
extern bool reference_callee_copied (CUMULATIVE_ARGS *, enum machine_mode,
                                     tree, bool);

extern void used_types_insert (tree);

extern int get_next_funcdef_no (void);
extern int get_current_funcdef_no (void);

extern void reset_funcdef_no (void);
extern void set_funcdef_no (int);

#endif /* GCC_FUNCTION_H */