/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
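
/* For example (an illustrative, non-compiled sketch; the tree argument
   name is hypothetical), a front end expanding `if (cond) stmt;' would
   make a call sequence roughly like this:  */
#if 0
      expand_start_cond (cond_tree, 0);	/* emit test; branch around body */
      /* ... expand RTL for the then-clause here ... */
      expand_end_cond ();		/* define the label branched to */
#endif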
#include "config.h"

#include <stdio.h>
#include <ctype.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

extern int current_function_returns_pcc_struct;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

extern rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

extern rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs,
   so we can mark them all live at the end of the function, if not optimizing.  */
extern rtx save_expr_regs;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
extern int frame_offset;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
extern rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
extern rtx tail_recursion_reentry;

/* Location at which to save the argument pointer if it will need to be
   referenced.  There are two cases where this is done: if nonlocal gotos
   exist, or if variables whose address is an offset from the argument
   pointer will be needed by inner routines.  */

extern rtx arg_pointer_save_area;

/* Chain of all RTL_EXPRs that have insns in them.  */
extern tree rtl_expr_chain;

#if 0  /* Turned off because 0 seems to work just as well.  */
/* Cleanup lists are required for binding levels regardless of whether
   that binding level has cleanups or not.  This node serves as the
   cleanup list whenever an empty list is required.  */
static tree empty_cleanup_list;
#endif
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   A chain of case nodes is initially maintained via the RIGHT fields
   in the nodes.  Nodes with higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node *left;	/* Left son in binary tree */
  struct case_node *right;	/* Right son in binary tree; also node chain */
  struct case_node *parent;	/* Parent of node in binary tree */
  tree low;			/* Lowest index value for this label */
  tree high;			/* Highest index value for this label */
  tree code_label;		/* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
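
/* As an illustration (not part of the compiler), the labels of
   `switch (i) { case 1: ... case 4: ... case 9: ... }' first form
   the chain

	{1,1} --right--> {4,4} --right--> {9,9}

   and, if compare/jump insns are chosen, balance_case_nodes may
   rearrange them into the tree

		 {4,4}
		/     \
	   {1,1}      {9,9}

   where each braced pair is the {low,high} of a case_node and the
   outer nodes' PARENT fields point back at {4,4}.  */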

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

static int estimate_case_costs ();
static void balance_case_nodes ();
static void emit_case_nodes ();
static void group_case_nodes ();
static void emit_jump_if_reachable ();

static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
void fixup_gotos ();
void free_temp_slots ();
static void expand_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void do_jump_if_equal ();
\f
/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0 (was empty_cleanup_list),
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels, kept in ascending order by value
	     as the list is built.
	     During expand_end_case, this list may be rearranged into a
	     nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  char *printname;
	  /* Nonzero if a case label has been seen in this case stmt.  */
	  char seenlabel;
	} case_stmt;
      /* For exception contours.  */
      struct
	{
	  /* List of exceptions raised.  This is a TREE_LIST
	     of whatever you want.  */
	  tree raised;
	  /* List of exceptions caught.  This is also a TREE_LIST
	     of whatever you want.  As a special case, it has the
	     value `void_type_node' if it handles default exceptions.  */
	  tree handled;

	  /* First insn of TRY block, in case resumptive model is needed.  */
	  rtx first_insn;
	  /* Label for the catch clauses.  */
	  rtx except_label;
	  /* Label for unhandled exceptions.  */
	  rtx unhandled_label;
	  /* Label at the end of whole construct.  */
	  rtx after_label;
	  /* Label which "escapes" the exception construct.
	     Like EXIT_LABEL for BREAK construct, but for exceptions.  */
	  rtx escape_label;
	} except_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Chain of all pending exception contours.  */
struct nesting *except_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
  (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  if (except_stack == this)			\
	    except_stack = except_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
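
/* Illustrative use (not compiled): a function such as `expand_end_loop'
   can pop its own entry, and everything nested more deeply, with

	POPSTACK (loop_stack);

   which unwinds nesting_stack down through that entry while keeping
   every per-construct stack consistent.  */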
\f
/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0 (was empty_cleanup_list),
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
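
/* Illustrative scenario (hypothetical source, not compiled):

	{
	  char buf[n];		-- contour with a stack level to restore
	  goto out;		-- label not yet seen: expand_fixup records
				   a `struct goto_fixup' on this chain
	}
      out: ;

   When the inner contour is exited, fixup_gotos notes the stack level
   in the fixup; once `out' is defined, the restore (and any cleanups)
   are inserted just before the recorded jump insn.  */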

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};
\f
void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
#if 0
  empty_cleanup_list = build_tree_list (NULL_TREE, NULL_TREE);
#endif
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
}
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  emit_indirect_jump (x);
}
\f
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slot == 0)
    {
      nonlocal_goto_handler_slot
	= assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  /* Check for a nonlocal goto to a containing function.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx (LABEL_REF, Pmode, label_rtx (label));
      rtx temp;

      p->has_nonlocal_label = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (p->nonlocal_goto_handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (p->nonlocal_goto_handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx, frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* Put in the static chain register the nonlocal label address.  */
	  emit_move_insn (static_chain_rtx, label_ref);
	  /* USE of frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
	  emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
	  emit_indirect_jump (temp);
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement whose target label in tree
   structure (if any) is BODY and whose target in rtl is LABEL,
   which must be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this would clobber
	     the stack pointer.  This one should be deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
	   && (rtl_label == loop_stack->data.loop.start_label
	       || rtl_label == loop_stack->data.loop.end_label
	       || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.  */

      {
	register rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();

	start_sequence ();
	pushlevel (0);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
	fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now!  */
	end_sequence ();
	emit_insns_after (fixup->before_jump, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= (((block->data.block.outer_cleanups
#if 0
	     && block->data.block.outer_cleanups != empty_cleanup_list
#endif
	     )
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_REGISTER (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_REGISTER (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 0);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* Mark the cleanups of exited blocks so that they are executed
     by the code above.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.
	   Also mark the cleanup_list_list element for F
	   that corresponds to this block, so that ultimately
	   this block's cleanups will be executed by the code above.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared,
	   it means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    TREE_ADDRESSABLE (lists) = 1;

	if (stack_level)
	  f->stack_level = stack_level;
      }
}
\f
/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx (ASM_INPUT, VOIDmode,
		      TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}
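
/* For example (illustrative): for the GNU C statement

	asm ("nop");

   this emits the single rtl insn (asm_input "nop").  */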

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
    }

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	{
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	    {
	      error ("output operand constraint contains `+'");
	      return;
	    }
	  if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
	    found_equal = 1;
	}
      if (! found_equal)
	{
	  error ("output operand constraint lacks `='");
	  return;
	}

      /* If an output operand is not a variable or indirect ref,
	 or a part of one,
	 create a SAVE_EXPR which is a pseudo-reg
	 to act as an intermediate temporary.
	 Make the asm insn write into that, then copy it to
	 the real output operand.  */

      while (TREE_CODE (val) == COMPONENT_REF
	     || TREE_CODE (val) == ARRAY_REF)
	val = TREE_OPERAND (val, 0);

      if (TREE_CODE (val) != VAR_DECL
	  && TREE_CODE (val) != PARM_DECL
	  && TREE_CODE (val) != INDIRECT_REF)
	{
	  TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
	  /* If it's a constant, print the error now so we don't crash later.  */
	  if (TREE_CODE (TREE_VALUE (tail)) != SAVE_EXPR)
	    {
	      error ("invalid output in `asm'");
	      return;
	    }
	}

      output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
		  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
		  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)));
	  return;
	}

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
	if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
	    || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
	  {
	    error ("input operand constraint contains `%c'",
		   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
	    return;
	  }

      XVECEXP (body, 3, i)	/* argvec */
	= expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
      XVECEXP (body, 4, i)	/* constraints */
	= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
		   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
			       ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0)
	num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
	{
	  XVECEXP (body, 0, i)
	    = gen_rtx (SET, VOIDmode,
		       output_rtx[i],
		       gen_rtx (ASM_OPERANDS, VOIDmode,
				TREE_STRING_POINTER (string),
				TREE_STRING_POINTER (TREE_PURPOSE (tail)),
				i, argvec, constraints,
				filename, line));
	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
	}

      /* If there are no outputs (but there are some clobbers)
	 store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
	XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
	{
	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
	  int j = decode_reg_name (regname);

	  if (j < 0)
	    {
	      if (j == -3)	/* `cc', which is not a register */
		continue;

	      if (j == -4)	/* `memory', don't cache memory across asm */
		{
		  XVECEXP (body, 0, i++)
		    = gen_rtx (CLOBBER, VOIDmode,
			       gen_rtx (MEM, QImode,
					gen_rtx (SCRATCH, VOIDmode, 0)));
		  continue;
		}

	      error ("unknown register name `%s' in `asm'", regname);
	      return;
	    }

	  /* Use QImode since that's guaranteed to clobber just one reg.  */
	  XVECEXP (body, 0, i++)
	    = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
	}

      insn = emit_insn (body);
    }

  free_temp_slots ();
}
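
/* For example (an illustrative sketch; the register and operand names
   are hypothetical): for the GNU C statement

	asm volatile ("foo %1,%0" : "=r" (x) : "g" (y) : "r1");

   this function emits roughly

	(parallel [(set (reg x) (asm_operands "foo %1,%0" "=r" 0 ...))
		   (clobber (reg:QI 1))])

   with MEM_VOLATILE_P set on the ASM_OPERANDS.  */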
\f
/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */
| 1307 | |
| 1308 | void |
| 1309 | expand_expr_stmt (exp) |
| 1310 | tree exp; |
| 1311 | { |
| 1312 | /* If -W, warn about statements with no side effects, |
| 1313 | except for an explicit cast to void (e.g. for assert()), and |
| 1314 | except inside a ({...}) where they may be useful. */ |
| 1315 | if (expr_stmts_for_value == 0 && exp != error_mark_node) |
| 1316 | { |
| 1317 | if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused) |
| 1318 | && !(TREE_CODE (exp) == CONVERT_EXPR |
| 1319 | && TREE_TYPE (exp) == void_type_node)) |
| 1320 | warning_with_file_and_line (emit_filename, emit_lineno, |
| 1321 | "statement with no effect"); |
| 1322 | else if (warn_unused) |
| 1323 | warn_if_unused_value (exp); |
| 1324 | } |
| 1325 | last_expr_type = TREE_TYPE (exp); |
| 1326 | if (! flag_syntax_only) |
| 1327 | last_expr_value = expand_expr (exp, |
| 1328 | (expr_stmts_for_value |
| 1329 | ? NULL_RTX : const0_rtx), |
| 1330 | VOIDmode, 0); |
| 1331 | |
| 1332 | /* If all we do is reference a volatile value in memory, |
| 1333 | copy it to a register to be sure it is actually touched. */ |
| 1334 | if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM |
| 1335 | && TREE_THIS_VOLATILE (exp)) |
| 1336 | { |
| 1337 | if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode) |
| 1338 | ; |
| 1339 | else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode) |
| 1340 | copy_to_reg (last_expr_value); |
| 1341 | else |
| 1342 | { |
| 1343 | rtx lab = gen_label_rtx (); |
| 1344 | |
| 1345 | /* Compare the value with itself to reference it. */ |
| 1346 | emit_cmp_insn (last_expr_value, last_expr_value, EQ, |
| 1347 | expand_expr (TYPE_SIZE (last_expr_type), |
| 1348 | NULL_RTX, VOIDmode, 0), |
| 1349 | BLKmode, 0, |
| 1350 | TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT); |
| 1351 | emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab)); |
| 1352 | emit_label (lab); |
| 1353 | } |
| 1354 | } |
| 1355 | |
| 1356 | /* If this expression is part of a ({...}) and is in memory, we may have |
| 1357 | to preserve temporaries. */ |
| 1358 | preserve_temp_slots (last_expr_value); |
| 1359 | |
| 1360 | /* Free any temporaries used to evaluate this expression. Any temporary |
| 1361 | used as a result of this expression will already have been preserved |
| 1362 | above. */ |
| 1363 | free_temp_slots (); |
| 1364 | |
| 1365 | emit_queue (); |
| 1366 | } |
| 1367 | |
| 1368 | /* Warn if EXP contains any computations whose results are not used. |
| 1369 | Return 1 if a warning is printed; 0 otherwise. */ |
| 1370 | |
| 1371 | static int |
| 1372 | warn_if_unused_value (exp) |
| 1373 | tree exp; |
| 1374 | { |
| 1375 | if (TREE_USED (exp)) |
| 1376 | return 0; |
| 1377 | |
| 1378 | switch (TREE_CODE (exp)) |
| 1379 | { |
| 1380 | case PREINCREMENT_EXPR: |
| 1381 | case POSTINCREMENT_EXPR: |
| 1382 | case PREDECREMENT_EXPR: |
| 1383 | case POSTDECREMENT_EXPR: |
| 1384 | case MODIFY_EXPR: |
| 1385 | case INIT_EXPR: |
| 1386 | case TARGET_EXPR: |
| 1387 | case CALL_EXPR: |
| 1388 | case METHOD_CALL_EXPR: |
| 1389 | case RTL_EXPR: |
| 1390 | case WITH_CLEANUP_EXPR: |
| 1391 | case EXIT_EXPR: |
| 1392 | /* We don't warn about COND_EXPR because it may be a useful |
| 1393 | construct if either arm contains a side effect. */ |
| 1394 | case COND_EXPR: |
| 1395 | return 0; |
| 1396 | |
| 1397 | case BIND_EXPR: |
| 1398 | /* For a binding, warn if no side effect within it. */ |
| 1399 | return warn_if_unused_value (TREE_OPERAND (exp, 1)); |
| 1400 | |
| 1401 | case TRUTH_ORIF_EXPR: |
| 1402 | case TRUTH_ANDIF_EXPR: |
| 1403 | /* In && or ||, warn if 2nd operand has no side effect. */ |
| 1404 | return warn_if_unused_value (TREE_OPERAND (exp, 1)); |
| 1405 | |
| 1406 | case COMPOUND_EXPR: |
| 1407 | if (warn_if_unused_value (TREE_OPERAND (exp, 0))) |
| 1408 | return 1; |
| 1409 | /* Let people do `(foo (), 0)' without a warning. */ |
| 1410 | if (TREE_CONSTANT (TREE_OPERAND (exp, 1))) |
| 1411 | return 0; |
| 1412 | return warn_if_unused_value (TREE_OPERAND (exp, 1)); |
| 1413 | |
| 1414 | case NOP_EXPR: |
| 1415 | case CONVERT_EXPR: |
| 1416 | case NON_LVALUE_EXPR: |
| 1417 | /* Don't warn about values cast to void. */ |
| 1418 | if (TREE_TYPE (exp) == void_type_node) |
| 1419 | return 0; |
| 1420 | /* Don't warn about conversions not explicit in the user's program. */ |
| 1421 | if (TREE_NO_UNUSED_WARNING (exp)) |
| 1422 | return 0; |
| 1423 | /* Assignment to a cast usually results in a cast of a modify. |
| 1424 | Don't complain about that. */ |
| 1425 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR) |
| 1426 | return 0; |
| 1427 | /* Sometimes it results in a cast of a cast of a modify. |
| 1428 | Don't complain about that. */ |
| 1429 | if ((TREE_CODE (TREE_OPERAND (exp, 0)) == CONVERT_EXPR |
| 1430 | || TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR) |
| 1431 | && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == MODIFY_EXPR) |
| 1432 | return 0; |
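      /* Otherwise, fall through to the default case and warn.  */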
| 1433 | |
| 1434 | default: |
| 1435 | /* Referencing a volatile value is a side effect, so don't warn. */ |
| 1436 | if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd' |
| 1437 | || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r') |
| 1438 | && TREE_THIS_VOLATILE (exp)) |
| 1439 | return 0; |
| 1440 | warning_with_file_and_line (emit_filename, emit_lineno, |
| 1441 | "value computed is not used"); |
| 1442 | return 1; |
| 1443 | } |
| 1444 | } |
| 1445 | |
| 1446 | /* Clear out the memory of the last expression evaluated. */ |
| 1447 | |
| 1448 | void |
| 1449 | clear_last_expr () |
| 1450 | { |
| 1451 | last_expr_type = 0; |
| 1452 | } |
| 1453 | |
| 1454 | /* Begin a statement which will return a value. |
| 1455 | Return the RTL_EXPR for this statement expr. |
| 1456 | The caller must save that value and pass it to expand_end_stmt_expr. */ |
| 1457 | |
| 1458 | tree |
| 1459 | expand_start_stmt_expr () |
| 1460 | { |
| 1461 | /* Make the RTL_EXPR node temporary, not momentary, |
| 1462 | so that rtl_expr_chain doesn't become garbage. */ |
| 1463 | int momentary = suspend_momentary (); |
| 1464 | tree t = make_node (RTL_EXPR); |
| 1465 | resume_momentary (momentary); |
| 1466 | start_sequence (); |
| 1467 | NO_DEFER_POP; |
| 1468 | expr_stmts_for_value++; |
| 1469 | return t; |
| 1470 | } |
| 1471 | |
| 1472 | /* Restore the previous state at the end of a statement that returns a value. |
| 1473 | Returns a tree node representing the statement's value and the |
| 1474 | insns to compute the value. |
| 1475 | |
| 1476 | The nodes of that expression have been freed by now, so we cannot use them. |
| 1477 | But we don't want to do that anyway; the expression has already been |
   evaluated and now we just want to use the value.  So generate an RTL_EXPR
| 1479 | with the proper type and RTL value. |
| 1480 | |
| 1481 | If the last substatement was not an expression, |
| 1482 | return something with type `void'. */ |
| 1483 | |
| 1484 | tree |
| 1485 | expand_end_stmt_expr (t) |
| 1486 | tree t; |
| 1487 | { |
| 1488 | OK_DEFER_POP; |
| 1489 | |
| 1490 | if (last_expr_type == 0) |
| 1491 | { |
| 1492 | last_expr_type = void_type_node; |
| 1493 | last_expr_value = const0_rtx; |
| 1494 | } |
| 1495 | else if (last_expr_value == 0) |
    /* There are some cases where this can happen, such as when the
       statement is of void type.  */
| 1498 | last_expr_value = const0_rtx; |
| 1499 | else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value)) |
| 1500 | /* Remove any possible QUEUED. */ |
| 1501 | last_expr_value = protect_from_queue (last_expr_value, 0); |
| 1502 | |
| 1503 | emit_queue (); |
| 1504 | |
| 1505 | TREE_TYPE (t) = last_expr_type; |
| 1506 | RTL_EXPR_RTL (t) = last_expr_value; |
| 1507 | RTL_EXPR_SEQUENCE (t) = get_insns (); |
| 1508 | |
| 1509 | rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain); |
| 1510 | |
| 1511 | end_sequence (); |
| 1512 | |
| 1513 | /* Don't consider deleting this expr or containing exprs at tree level. */ |
| 1514 | TREE_SIDE_EFFECTS (t) = 1; |
| 1515 | /* Propagate volatility of the actual RTL expr. */ |
| 1516 | TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value); |
| 1517 | |
| 1518 | last_expr_type = 0; |
| 1519 | expr_stmts_for_value--; |
| 1520 | |
| 1521 | return t; |
| 1522 | } |
| 1523 | \f |
| 1524 | /* The exception handling nesting looks like this: |
| 1525 | |
| 1526 | <-- Level N-1 |
| 1527 | { <-- exception handler block |
| 1528 | <-- Level N |
| 1529 | <-- in an exception handler |
| 1530 | { <-- try block |
| 1531 | : <-- in a TRY block |
| 1532 | : <-- in an exception handler |
| 1533 | : |
| 1534 | } |
| 1535 | |
| 1536 | { <-- except block |
| 1537 | : <-- in an except block |
| 1538 | : <-- in an exception handler |
| 1539 | : |
| 1540 | } |
| 1541 | |
| 1542 | } |
| 1543 | */ |
| 1544 | |
| 1545 | /* Return nonzero iff in a try block at level LEVEL. */ |
| 1546 | |
| 1547 | int |
| 1548 | in_try_block (level) |
| 1549 | int level; |
| 1550 | { |
| 1551 | struct nesting *n = except_stack; |
| 1552 | while (1) |
| 1553 | { |
| 1554 | while (n && n->data.except_stmt.after_label != 0) |
| 1555 | n = n->next; |
| 1556 | if (n == 0) |
| 1557 | return 0; |
| 1558 | if (level == 0) |
| 1559 | return n != 0; |
| 1560 | level--; |
| 1561 | n = n->next; |
| 1562 | } |
| 1563 | } |
| 1564 | |
| 1565 | /* Return nonzero iff in an except block at level LEVEL. */ |
| 1566 | |
| 1567 | int |
| 1568 | in_except_block (level) |
| 1569 | int level; |
| 1570 | { |
| 1571 | struct nesting *n = except_stack; |
| 1572 | while (1) |
| 1573 | { |
| 1574 | while (n && n->data.except_stmt.after_label == 0) |
| 1575 | n = n->next; |
| 1576 | if (n == 0) |
| 1577 | return 0; |
| 1578 | if (level == 0) |
| 1579 | return n != 0; |
| 1580 | level--; |
| 1581 | n = n->next; |
| 1582 | } |
| 1583 | } |
| 1584 | |
| 1585 | /* Return nonzero iff in an exception handler at level LEVEL. */ |
| 1586 | |
| 1587 | int |
| 1588 | in_exception_handler (level) |
| 1589 | int level; |
| 1590 | { |
| 1591 | struct nesting *n = except_stack; |
| 1592 | while (n && level--) |
| 1593 | n = n->next; |
| 1594 | return n != 0; |
| 1595 | } |
| 1596 | |
/* Record the fact that the current exception nesting raises
   exception EX.  If not in an exception handler, do nothing and
   return 0; otherwise return nonzero.  */
| 1599 | int |
| 1600 | expand_raise (ex) |
| 1601 | tree ex; |
| 1602 | { |
| 1603 | tree *raises_ptr; |
| 1604 | |
| 1605 | if (except_stack == 0) |
| 1606 | return 0; |
| 1607 | raises_ptr = &except_stack->data.except_stmt.raised; |
| 1608 | if (! value_member (ex, *raises_ptr)) |
| 1609 | *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr); |
| 1610 | return 1; |
| 1611 | } |
| 1612 | |
| 1613 | /* Generate RTL for the start of a try block. |
| 1614 | |
| 1615 | TRY_CLAUSE is the condition to test to enter the try block. */ |
| 1616 | |
| 1617 | void |
| 1618 | expand_start_try (try_clause, exitflag, escapeflag) |
| 1619 | tree try_clause; |
| 1620 | int exitflag; |
| 1621 | int escapeflag; |
| 1622 | { |
| 1623 | struct nesting *thishandler = ALLOC_NESTING (); |
| 1624 | |
  /* Make an entry on except_stack for the try block we are entering.  */
| 1626 | |
| 1627 | thishandler->next = except_stack; |
| 1628 | thishandler->all = nesting_stack; |
| 1629 | thishandler->depth = ++nesting_depth; |
| 1630 | thishandler->data.except_stmt.raised = 0; |
| 1631 | thishandler->data.except_stmt.handled = 0; |
| 1632 | thishandler->data.except_stmt.first_insn = get_insns (); |
| 1633 | thishandler->data.except_stmt.except_label = gen_label_rtx (); |
| 1634 | thishandler->data.except_stmt.unhandled_label = 0; |
| 1635 | thishandler->data.except_stmt.after_label = 0; |
| 1636 | thishandler->data.except_stmt.escape_label |
| 1637 | = escapeflag ? thishandler->data.except_stmt.except_label : 0; |
| 1638 | thishandler->exit_label = exitflag ? gen_label_rtx () : 0; |
| 1639 | except_stack = thishandler; |
| 1640 | nesting_stack = thishandler; |
| 1641 | |
| 1642 | do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX); |
| 1643 | } |
| 1644 | |
/* End of a TRY block.  Generate the label used to skip the except
   clauses when the try block exits normally, and jump to it.  */
| 1646 | |
| 1647 | void |
| 1648 | expand_end_try () |
| 1649 | { |
| 1650 | except_stack->data.except_stmt.after_label = gen_label_rtx (); |
| 1651 | expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label, |
| 1652 | NULL_RTX); |
| 1653 | } |
| 1654 | |
| 1655 | /* Start an `except' nesting contour. |
| 1656 | EXITFLAG says whether this contour should be able to `exit' something. |
| 1657 | ESCAPEFLAG says whether this contour should be escapable. */ |
| 1658 | |
| 1659 | void |
| 1660 | expand_start_except (exitflag, escapeflag) |
| 1661 | int exitflag; |
| 1662 | int escapeflag; |
| 1663 | { |
| 1664 | if (exitflag) |
| 1665 | { |
| 1666 | struct nesting *n; |
      /* An `exit' from catch clauses goes out to the next exit level,
| 1668 | if there is one. Otherwise, it just goes to the end |
| 1669 | of the construct. */ |
| 1670 | for (n = except_stack->next; n; n = n->next) |
| 1671 | if (n->exit_label != 0) |
| 1672 | { |
| 1673 | except_stack->exit_label = n->exit_label; |
| 1674 | break; |
| 1675 | } |
| 1676 | if (n == 0) |
| 1677 | except_stack->exit_label = except_stack->data.except_stmt.after_label; |
| 1678 | } |
| 1679 | if (escapeflag) |
| 1680 | { |
| 1681 | struct nesting *n; |
      /* An `escape' from catch clauses goes out to the next escape level,
| 1683 | if there is one. Otherwise, it just goes to the end |
| 1684 | of the construct. */ |
| 1685 | for (n = except_stack->next; n; n = n->next) |
| 1686 | if (n->data.except_stmt.escape_label != 0) |
| 1687 | { |
| 1688 | except_stack->data.except_stmt.escape_label |
| 1689 | = n->data.except_stmt.escape_label; |
| 1690 | break; |
| 1691 | } |
| 1692 | if (n == 0) |
| 1693 | except_stack->data.except_stmt.escape_label |
| 1694 | = except_stack->data.except_stmt.after_label; |
| 1695 | } |
| 1696 | do_pending_stack_adjust (); |
| 1697 | emit_label (except_stack->data.except_stmt.except_label); |
| 1698 | } |
| 1699 | |
| 1700 | /* Generate code to `escape' from an exception contour. This |
| 1701 | is like `exiting', but does not conflict with constructs which |
| 1702 | use `exit_label'. |
| 1703 | |
| 1704 | Return nonzero if this contour is escapable, otherwise |
| 1705 | return zero, and language-specific code will emit the |
| 1706 | appropriate error message. */ |
| 1707 | int |
| 1708 | expand_escape_except () |
| 1709 | { |
| 1710 | struct nesting *n; |
| 1711 | last_expr_type = 0; |
| 1712 | for (n = except_stack; n; n = n->next) |
| 1713 | if (n->data.except_stmt.escape_label != 0) |
| 1714 | { |
| 1715 | expand_goto_internal (NULL_TREE, |
| 1716 | n->data.except_stmt.escape_label, NULL_RTX); |
| 1717 | return 1; |
| 1718 | } |
| 1719 | |
| 1720 | return 0; |
| 1721 | } |
| 1722 | |
/* Finish processing an `except' contour.
   Culls out all exceptions which might be raised but not
   handled, and returns the list to the caller.
| 1726 | Language-specific code is responsible for dealing with these |
| 1727 | exceptions. */ |
| 1728 | |
| 1729 | tree |
| 1730 | expand_end_except () |
| 1731 | { |
| 1732 | struct nesting *n; |
| 1733 | tree raised = NULL_TREE; |
| 1734 | |
| 1735 | do_pending_stack_adjust (); |
| 1736 | emit_label (except_stack->data.except_stmt.after_label); |
| 1737 | |
| 1738 | n = except_stack->next; |
| 1739 | if (n) |
| 1740 | { |
| 1741 | /* Propagate exceptions raised but not handled to next |
| 1742 | highest level. */ |
      tree handled = except_stack->data.except_stmt.handled;
| 1744 | if (handled != void_type_node) |
| 1745 | { |
| 1746 | tree prev = NULL_TREE; |
| 1747 | raised = except_stack->data.except_stmt.raised; |
| 1748 | while (handled) |
| 1749 | { |
| 1750 | tree this_raise; |
| 1751 | for (this_raise = raised, prev = 0; this_raise; |
| 1752 | this_raise = TREE_CHAIN (this_raise)) |
| 1753 | { |
| 1754 | if (value_member (TREE_VALUE (this_raise), handled)) |
| 1755 | { |
| 1756 | if (prev) |
| 1757 | TREE_CHAIN (prev) = TREE_CHAIN (this_raise); |
| 1758 | else |
| 1759 | { |
| 1760 | raised = TREE_CHAIN (raised); |
| 1761 | if (raised == NULL_TREE) |
| 1762 | goto nada; |
| 1763 | } |
| 1764 | } |
| 1765 | else |
| 1766 | prev = this_raise; |
| 1767 | } |
| 1768 | handled = TREE_CHAIN (handled); |
| 1769 | } |
| 1770 | if (prev == NULL_TREE) |
| 1771 | prev = raised; |
| 1772 | if (prev) |
| 1773 | TREE_CHAIN (prev) = n->data.except_stmt.raised; |
| 1774 | nada: |
| 1775 | n->data.except_stmt.raised = raised; |
| 1776 | } |
| 1777 | } |
| 1778 | |
| 1779 | POPSTACK (except_stack); |
| 1780 | last_expr_type = 0; |
| 1781 | return raised; |
| 1782 | } |
| 1783 | |
| 1784 | /* Record that exception EX is caught by this exception handler. |
| 1785 | Return nonzero if in exception handling construct, otherwise return 0. */ |
| 1786 | int |
| 1787 | expand_catch (ex) |
| 1788 | tree ex; |
| 1789 | { |
| 1790 | tree *raises_ptr; |
| 1791 | |
| 1792 | if (except_stack == 0) |
| 1793 | return 0; |
| 1794 | raises_ptr = &except_stack->data.except_stmt.handled; |
| 1795 | if (*raises_ptr != void_type_node |
| 1796 | && ex != NULL_TREE |
| 1797 | && ! value_member (ex, *raises_ptr)) |
| 1798 | *raises_ptr = tree_cons (NULL_TREE, ex, *raises_ptr); |
| 1799 | return 1; |
| 1800 | } |
| 1801 | |
| 1802 | /* Record that this exception handler catches all exceptions. |
| 1803 | Return nonzero if in exception handling construct, otherwise return 0. */ |
| 1804 | |
| 1805 | int |
| 1806 | expand_catch_default () |
| 1807 | { |
| 1808 | if (except_stack == 0) |
| 1809 | return 0; |
| 1810 | except_stack->data.except_stmt.handled = void_type_node; |
| 1811 | return 1; |
| 1812 | } |
| 1813 | |
| 1814 | int |
| 1815 | expand_end_catch () |
| 1816 | { |
| 1817 | if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0) |
| 1818 | return 0; |
| 1819 | expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label, |
| 1820 | NULL_RTX); |
| 1821 | return 1; |
| 1822 | } |
| 1823 | \f |
| 1824 | /* Generate RTL for the start of an if-then. COND is the expression |
| 1825 | whose truth should be tested. |
| 1826 | |
| 1827 | If EXITFLAG is nonzero, this conditional is visible to |
| 1828 | `exit_something'. */ |
| 1829 | |
| 1830 | void |
| 1831 | expand_start_cond (cond, exitflag) |
| 1832 | tree cond; |
| 1833 | int exitflag; |
| 1834 | { |
| 1835 | struct nesting *thiscond = ALLOC_NESTING (); |
| 1836 | |
| 1837 | /* Make an entry on cond_stack for the cond we are entering. */ |
| 1838 | |
| 1839 | thiscond->next = cond_stack; |
| 1840 | thiscond->all = nesting_stack; |
| 1841 | thiscond->depth = ++nesting_depth; |
| 1842 | thiscond->data.cond.next_label = gen_label_rtx (); |
| 1843 | /* Before we encounter an `else', we don't need a separate exit label |
| 1844 | unless there are supposed to be exit statements |
| 1845 | to exit this conditional. */ |
| 1846 | thiscond->exit_label = exitflag ? gen_label_rtx () : 0; |
| 1847 | thiscond->data.cond.endif_label = thiscond->exit_label; |
| 1848 | cond_stack = thiscond; |
| 1849 | nesting_stack = thiscond; |
| 1850 | |
| 1851 | do_jump (cond, thiscond->data.cond.next_label, NULL_RTX); |
| 1852 | } |
| 1853 | |
/* Generate RTL between the then-clause and the elseif-clause
   of an if-then-elseif-....  */
| 1856 | |
| 1857 | void |
| 1858 | expand_start_elseif (cond) |
| 1859 | tree cond; |
| 1860 | { |
| 1861 | if (cond_stack->data.cond.endif_label == 0) |
| 1862 | cond_stack->data.cond.endif_label = gen_label_rtx (); |
| 1863 | emit_jump (cond_stack->data.cond.endif_label); |
| 1864 | emit_label (cond_stack->data.cond.next_label); |
| 1865 | cond_stack->data.cond.next_label = gen_label_rtx (); |
| 1866 | do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX); |
| 1867 | } |
| 1868 | |
| 1869 | /* Generate RTL between the then-clause and the else-clause |
| 1870 | of an if-then-else. */ |
| 1871 | |
| 1872 | void |
| 1873 | expand_start_else () |
| 1874 | { |
| 1875 | if (cond_stack->data.cond.endif_label == 0) |
| 1876 | cond_stack->data.cond.endif_label = gen_label_rtx (); |
| 1877 | emit_jump (cond_stack->data.cond.endif_label); |
| 1878 | emit_label (cond_stack->data.cond.next_label); |
| 1879 | cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */ |
| 1880 | } |
| 1881 | |
| 1882 | /* Generate RTL for the end of an if-then. |
| 1883 | Pop the record for it off of cond_stack. */ |
| 1884 | |
| 1885 | void |
| 1886 | expand_end_cond () |
| 1887 | { |
| 1888 | struct nesting *thiscond = cond_stack; |
| 1889 | |
| 1890 | do_pending_stack_adjust (); |
| 1891 | if (thiscond->data.cond.next_label) |
| 1892 | emit_label (thiscond->data.cond.next_label); |
| 1893 | if (thiscond->data.cond.endif_label) |
| 1894 | emit_label (thiscond->data.cond.endif_label); |
| 1895 | |
| 1896 | POPSTACK (cond_stack); |
| 1897 | last_expr_type = 0; |
| 1898 | } |
| 1899 | \f |
| 1900 | /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this |
| 1901 | loop should be exited by `exit_something'. This is a loop for which |
   `expand_continue_loop' will jump to the top of the loop.
| 1903 | |
| 1904 | Make an entry on loop_stack to record the labels associated with |
| 1905 | this loop. */ |
| 1906 | |
| 1907 | struct nesting * |
| 1908 | expand_start_loop (exit_flag) |
| 1909 | int exit_flag; |
| 1910 | { |
| 1911 | register struct nesting *thisloop = ALLOC_NESTING (); |
| 1912 | |
| 1913 | /* Make an entry on loop_stack for the loop we are entering. */ |
| 1914 | |
| 1915 | thisloop->next = loop_stack; |
| 1916 | thisloop->all = nesting_stack; |
| 1917 | thisloop->depth = ++nesting_depth; |
| 1918 | thisloop->data.loop.start_label = gen_label_rtx (); |
| 1919 | thisloop->data.loop.end_label = gen_label_rtx (); |
| 1920 | thisloop->data.loop.continue_label = thisloop->data.loop.start_label; |
| 1921 | thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0; |
| 1922 | loop_stack = thisloop; |
| 1923 | nesting_stack = thisloop; |
| 1924 | |
| 1925 | do_pending_stack_adjust (); |
| 1926 | emit_queue (); |
| 1927 | emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG); |
| 1928 | emit_label (thisloop->data.loop.start_label); |
| 1929 | |
| 1930 | return thisloop; |
| 1931 | } |
| 1932 | |
| 1933 | /* Like expand_start_loop but for a loop where the continuation point |
| 1934 | (for expand_continue_loop) will be specified explicitly. */ |
| 1935 | |
| 1936 | struct nesting * |
| 1937 | expand_start_loop_continue_elsewhere (exit_flag) |
| 1938 | int exit_flag; |
| 1939 | { |
| 1940 | struct nesting *thisloop = expand_start_loop (exit_flag); |
| 1941 | loop_stack->data.loop.continue_label = gen_label_rtx (); |
| 1942 | return thisloop; |
| 1943 | } |
| 1944 | |
| 1945 | /* Specify the continuation point for a loop started with |
| 1946 | expand_start_loop_continue_elsewhere. |
| 1947 | Use this at the point in the code to which a continue statement |
| 1948 | should jump. */ |
| 1949 | |
| 1950 | void |
| 1951 | expand_loop_continue_here () |
| 1952 | { |
| 1953 | do_pending_stack_adjust (); |
| 1954 | emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT); |
| 1955 | emit_label (loop_stack->data.loop.continue_label); |
| 1956 | } |
| 1957 | |
| 1958 | /* Finish a loop. Generate a jump back to the top and the loop-exit label. |
| 1959 | Pop the block off of loop_stack. */ |
| 1960 | |
| 1961 | void |
| 1962 | expand_end_loop () |
| 1963 | { |
| 1964 | register rtx insn = get_last_insn (); |
| 1965 | register rtx start_label = loop_stack->data.loop.start_label; |
| 1966 | rtx last_test_insn = 0; |
| 1967 | int num_insns = 0; |
| 1968 | |
| 1969 | /* Mark the continue-point at the top of the loop if none elsewhere. */ |
| 1970 | if (start_label == loop_stack->data.loop.continue_label) |
| 1971 | emit_note_before (NOTE_INSN_LOOP_CONT, start_label); |
| 1972 | |
| 1973 | do_pending_stack_adjust (); |
| 1974 | |
| 1975 | /* If optimizing, perhaps reorder the loop. If the loop |
| 1976 | starts with a conditional exit, roll that to the end |
| 1977 | where it will optimize together with the jump back. |
| 1978 | |
| 1979 | We look for the last conditional branch to the exit that we encounter |
| 1980 | before hitting 30 insns or a CALL_INSN. If we see an unconditional |
| 1981 | branch to the exit first, use it. |
| 1982 | |
| 1983 | We must also stop at NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes |
| 1984 | because moving them is not valid. */ |
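
  /* Schematically (labels invented for illustration), the reordering
     below turns

	start:	if (! COND) goto end;		goto start;
		BODY				body:	BODY
		goto start;		into	start:	if (! COND) goto end;
	end:						goto body;
						end:

     and jump optimization later fuses the last two jumps into a single
     `if (COND) goto body'.  */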
| 1985 | |
| 1986 | if (optimize |
| 1987 | && |
| 1988 | ! (GET_CODE (insn) == JUMP_INSN |
| 1989 | && GET_CODE (PATTERN (insn)) == SET |
| 1990 | && SET_DEST (PATTERN (insn)) == pc_rtx |
| 1991 | && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)) |
| 1992 | { |
| 1993 | /* Scan insns from the top of the loop looking for a qualified |
| 1994 | conditional exit. */ |
| 1995 | for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn; |
| 1996 | insn = NEXT_INSN (insn)) |
| 1997 | { |
| 1998 | if (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == CODE_LABEL) |
| 1999 | break; |
| 2000 | |
| 2001 | if (GET_CODE (insn) == NOTE |
| 2002 | && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG |
| 2003 | || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)) |
| 2004 | break; |
| 2005 | |
| 2006 | if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN) |
| 2007 | num_insns++; |
| 2008 | |
| 2009 | if (last_test_insn && num_insns > 30) |
| 2010 | break; |
| 2011 | |
| 2012 | if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET |
| 2013 | && SET_DEST (PATTERN (insn)) == pc_rtx |
| 2014 | && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE |
| 2015 | && ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF |
| 2016 | && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0) |
| 2017 | == loop_stack->data.loop.end_label)) |
| 2018 | || (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF |
| 2019 | && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0) |
| 2020 | == loop_stack->data.loop.end_label)))) |
| 2021 | last_test_insn = insn; |
| 2022 | |
| 2023 | if (last_test_insn == 0 && GET_CODE (insn) == JUMP_INSN |
| 2024 | && GET_CODE (PATTERN (insn)) == SET |
| 2025 | && SET_DEST (PATTERN (insn)) == pc_rtx |
| 2026 | && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF |
| 2027 | && (XEXP (SET_SRC (PATTERN (insn)), 0) |
| 2028 | == loop_stack->data.loop.end_label)) |
| 2029 | /* Include BARRIER. */ |
| 2030 | last_test_insn = NEXT_INSN (insn); |
| 2031 | } |
| 2032 | |
| 2033 | if (last_test_insn != 0 && last_test_insn != get_last_insn ()) |
| 2034 | { |
	  /* We found one.  Move the insns from the top of the loop
	     down through that test to the end of the loop, and enter
	     the loop with a jump to the moved test.  */
| 2038 | register rtx newstart_label = gen_label_rtx (); |
| 2039 | register rtx start_move = start_label; |
| 2040 | |
| 2041 | /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note, |
| 2042 | then we want to move this note also. */ |
| 2043 | if (GET_CODE (PREV_INSN (start_move)) == NOTE |
| 2044 | && (NOTE_LINE_NUMBER (PREV_INSN (start_move)) |
| 2045 | == NOTE_INSN_LOOP_CONT)) |
| 2046 | start_move = PREV_INSN (start_move); |
| 2047 | |
| 2048 | emit_label_after (newstart_label, PREV_INSN (start_move)); |
| 2049 | reorder_insns (start_move, last_test_insn, get_last_insn ()); |
| 2050 | emit_jump_insn_after (gen_jump (start_label), |
| 2051 | PREV_INSN (newstart_label)); |
| 2052 | emit_barrier_after (PREV_INSN (newstart_label)); |
| 2053 | start_label = newstart_label; |
| 2054 | } |
| 2055 | } |
| 2056 | |
| 2057 | emit_jump (start_label); |
| 2058 | emit_note (NULL_PTR, NOTE_INSN_LOOP_END); |
| 2059 | emit_label (loop_stack->data.loop.end_label); |
| 2060 | |
| 2061 | POPSTACK (loop_stack); |
| 2062 | |
| 2063 | last_expr_type = 0; |
| 2064 | } |
| 2065 | |
| 2066 | /* Generate a jump to the current loop's continue-point. |
| 2067 | This is usually the top of the loop, but may be specified |
| 2068 | explicitly elsewhere. If not currently inside a loop, |
| 2069 | return 0 and do nothing; caller will print an error message. */ |
| 2070 | |
| 2071 | int |
| 2072 | expand_continue_loop (whichloop) |
| 2073 | struct nesting *whichloop; |
| 2074 | { |
| 2075 | last_expr_type = 0; |
| 2076 | if (whichloop == 0) |
| 2077 | whichloop = loop_stack; |
| 2078 | if (whichloop == 0) |
| 2079 | return 0; |
| 2080 | expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label, |
| 2081 | NULL_RTX); |
| 2082 | return 1; |
| 2083 | } |
| 2084 | |
| 2085 | /* Generate a jump to exit the current loop. If not currently inside a loop, |
| 2086 | return 0 and do nothing; caller will print an error message. */ |
| 2087 | |
| 2088 | int |
| 2089 | expand_exit_loop (whichloop) |
| 2090 | struct nesting *whichloop; |
| 2091 | { |
| 2092 | last_expr_type = 0; |
| 2093 | if (whichloop == 0) |
| 2094 | whichloop = loop_stack; |
| 2095 | if (whichloop == 0) |
| 2096 | return 0; |
| 2097 | expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX); |
| 2098 | return 1; |
| 2099 | } |
| 2100 | |
| 2101 | /* Generate a conditional jump to exit the current loop if COND |
| 2102 | evaluates to zero. If not currently inside a loop, |
| 2103 | return 0 and do nothing; caller will print an error message. */ |
| 2104 | |
| 2105 | int |
| 2106 | expand_exit_loop_if_false (whichloop, cond) |
| 2107 | struct nesting *whichloop; |
| 2108 | tree cond; |
| 2109 | { |
| 2110 | last_expr_type = 0; |
| 2111 | if (whichloop == 0) |
| 2112 | whichloop = loop_stack; |
| 2113 | if (whichloop == 0) |
| 2114 | return 0; |
| 2115 | do_jump (cond, whichloop->data.loop.end_label, NULL_RTX); |
| 2116 | return 1; |
| 2117 | } |
| 2118 | |
/* Return nonzero if we should preserve sub-expressions as separate
| 2120 | pseudos. We never do so if we aren't optimizing. We always do so |
| 2121 | if -fexpensive-optimizations. |
| 2122 | |
   Otherwise, we only do so if we are in the "early" part of a loop; i.e.,
   few enough insns have been emitted since the loop began that the loop
   may still turn out to be a small one.  */
| 2125 | |
| 2126 | int |
| 2127 | preserve_subexpressions_p () |
| 2128 | { |
| 2129 | rtx insn; |
| 2130 | |
| 2131 | if (flag_expensive_optimizations) |
| 2132 | return 1; |
| 2133 | |
| 2134 | if (optimize == 0 || loop_stack == 0) |
| 2135 | return 0; |
| 2136 | |
| 2137 | insn = get_last_insn_anywhere (); |
| 2138 | |
| 2139 | return (insn |
| 2140 | && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label) |
| 2141 | < n_non_fixed_regs * 3)); |
| 2142 | |
| 2143 | } |
| 2144 | |
| 2145 | /* Generate a jump to exit the current loop, conditional, binding contour |
| 2146 | or case statement. Not all such constructs are visible to this function, |
| 2147 | only those started with EXIT_FLAG nonzero. Individual languages use |
| 2148 | the EXIT_FLAG parameter to control which kinds of constructs you can |
| 2149 | exit this way. |
| 2150 | |
| 2151 | If not currently inside anything that can be exited, |
| 2152 | return 0 and do nothing; caller will print an error message. */ |
| 2153 | |
| 2154 | int |
| 2155 | expand_exit_something () |
| 2156 | { |
| 2157 | struct nesting *n; |
| 2158 | last_expr_type = 0; |
| 2159 | for (n = nesting_stack; n; n = n->all) |
| 2160 | if (n->exit_label != 0) |
| 2161 | { |
| 2162 | expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX); |
| 2163 | return 1; |
| 2164 | } |
| 2165 | |
| 2166 | return 0; |
| 2167 | } |
| 2168 | \f |
| 2169 | /* Generate RTL to return from the current function, with no value. |
| 2170 | (That is, we do not do anything about returning any value.) */ |
| 2171 | |
| 2172 | void |
| 2173 | expand_null_return () |
| 2174 | { |
| 2175 | struct nesting *block = block_stack; |
| 2176 | rtx last_insn = 0; |
| 2177 | |
| 2178 | /* Does any pending block have cleanups? */ |
| 2179 | |
| 2180 | while (block && block->data.block.cleanups == 0) |
| 2181 | block = block->next; |
| 2182 | |
| 2183 | /* If yes, use a goto to return, since that runs cleanups. */ |
| 2184 | |
| 2185 | expand_null_return_1 (last_insn, block != 0); |
| 2186 | } |
| 2187 | |
| 2188 | /* Generate RTL to return from the current function, with value VAL. */ |
| 2189 | |
| 2190 | void |
| 2191 | expand_value_return (val) |
| 2192 | rtx val; |
| 2193 | { |
| 2194 | struct nesting *block = block_stack; |
| 2195 | rtx last_insn = get_last_insn (); |
| 2196 | rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl)); |
| 2197 | |
| 2198 | /* Copy the value to the return location |
| 2199 | unless it's already there. */ |
| 2200 | |
| 2201 | if (return_reg != val) |
| 2202 | { |
| 2203 | #ifdef PROMOTE_FUNCTION_RETURN |
| 2204 | enum machine_mode mode = DECL_MODE (DECL_RESULT (current_function_decl)); |
| 2205 | tree type = TREE_TYPE (DECL_RESULT (current_function_decl)); |
| 2206 | int unsignedp = TREE_UNSIGNED (type); |
| 2207 | |
| 2208 | if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE |
| 2209 | || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE |
| 2210 | || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE |
| 2211 | || TREE_CODE (type) == OFFSET_TYPE) |
| 2212 | { |
| 2213 | PROMOTE_MODE (mode, unsignedp, type); |
| 2214 | } |
| 2215 | |
| 2216 | if (GET_MODE (val) != VOIDmode && GET_MODE (val) != mode) |
| 2217 | convert_move (return_reg, val, unsignedp); |
| 2218 | else |
| 2219 | #endif |
| 2220 | emit_move_insn (return_reg, val); |
| 2221 | } |
| 2222 | if (GET_CODE (return_reg) == REG |
| 2223 | && REGNO (return_reg) < FIRST_PSEUDO_REGISTER) |
| 2224 | emit_insn (gen_rtx (USE, VOIDmode, return_reg)); |
| 2225 | |
| 2226 | /* Does any pending block have cleanups? */ |
| 2227 | |
| 2228 | while (block && block->data.block.cleanups == 0) |
| 2229 | block = block->next; |
| 2230 | |
| 2231 | /* If yes, use a goto to return, since that runs cleanups. |
| 2232 | Use LAST_INSN to put cleanups *before* the move insn emitted above. */ |
| 2233 | |
| 2234 | expand_null_return_1 (last_insn, block != 0); |
| 2235 | } |
| 2236 | |
| 2237 | /* Output a return with no value. If LAST_INSN is nonzero, |
| 2238 | pretend that the return takes place after LAST_INSN. |
| 2239 | If USE_GOTO is nonzero then don't use a return instruction; |
| 2240 | go to the return label instead. This causes any cleanups |
| 2241 | of pending blocks to be executed normally. */ |
| 2242 | |
| 2243 | static void |
| 2244 | expand_null_return_1 (last_insn, use_goto) |
| 2245 | rtx last_insn; |
| 2246 | int use_goto; |
| 2247 | { |
| 2248 | rtx end_label = cleanup_label ? cleanup_label : return_label; |
| 2249 | |
| 2250 | clear_pending_stack_adjust (); |
| 2251 | do_pending_stack_adjust (); |
| 2252 | last_expr_type = 0; |
| 2253 | |
| 2254 | /* PCC-struct return always uses an epilogue. */ |
| 2255 | if (current_function_returns_pcc_struct || use_goto) |
| 2256 | { |
| 2257 | if (end_label == 0) |
| 2258 | end_label = return_label = gen_label_rtx (); |
| 2259 | expand_goto_internal (NULL_TREE, end_label, last_insn); |
| 2260 | return; |
| 2261 | } |
| 2262 | |
| 2263 | /* Otherwise output a simple return-insn if one is available, |
| 2264 | unless it won't do the job. */ |
| 2265 | #ifdef HAVE_return |
| 2266 | if (HAVE_return && use_goto == 0 && cleanup_label == 0) |
| 2267 | { |
| 2268 | emit_jump_insn (gen_return ()); |
| 2269 | emit_barrier (); |
| 2270 | return; |
| 2271 | } |
| 2272 | #endif |
| 2273 | |
| 2274 | /* Otherwise jump to the epilogue. */ |
| 2275 | expand_goto_internal (NULL_TREE, end_label, last_insn); |
| 2276 | } |
| 2277 | \f |
| 2278 | /* Generate RTL to evaluate the expression RETVAL and return it |
| 2279 | from the current function. */ |
| 2280 | |
| 2281 | void |
| 2282 | expand_return (retval) |
| 2283 | tree retval; |
| 2284 | { |
| 2285 | /* If there are any cleanups to be performed, then they will |
| 2286 | be inserted following LAST_INSN. It is desirable |
| 2287 | that the last_insn, for such purposes, should be the |
| 2288 | last insn before computing the return value. Otherwise, cleanups |
| 2289 | which call functions can clobber the return value. */ |
| 2290 | /* ??? rms: I think that is erroneous, because in C++ it would |
| 2291 | run destructors on variables that might be used in the subsequent |
| 2292 | computation of the return value. */ |
| 2293 | rtx last_insn = 0; |
| 2294 | register rtx val = 0; |
| 2295 | register rtx op0; |
| 2296 | tree retval_rhs; |
| 2297 | int cleanups; |
| 2298 | struct nesting *block; |
| 2299 | |
| 2300 | /* If function wants no value, give it none. */ |
| 2301 | if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE) |
| 2302 | { |
| 2303 | expand_expr (retval, NULL_RTX, VOIDmode, 0); |
| 2304 | emit_queue (); |
| 2305 | expand_null_return (); |
| 2306 | return; |
| 2307 | } |
| 2308 | |
| 2309 | /* Are any cleanups needed? E.g. C++ destructors to be run? */ |
| 2310 | cleanups = any_pending_cleanups (1); |
| 2311 | |
| 2312 | if (TREE_CODE (retval) == RESULT_DECL) |
| 2313 | retval_rhs = retval; |
| 2314 | else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR) |
| 2315 | && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL) |
| 2316 | retval_rhs = TREE_OPERAND (retval, 1); |
| 2317 | else if (TREE_TYPE (retval) == void_type_node) |
| 2318 | /* Recognize tail-recursive call to void function. */ |
| 2319 | retval_rhs = retval; |
| 2320 | else |
| 2321 | retval_rhs = NULL_TREE; |
| 2322 | |
| 2323 | /* Only use `last_insn' if there are cleanups which must be run. */ |
| 2324 | if (cleanups || cleanup_label != 0) |
| 2325 | last_insn = get_last_insn (); |
| 2326 | |
| 2327 | /* Distribute return down conditional expr if either of the sides |
| 2328 | may involve tail recursion (see test below). This enhances the number |
| 2329 | of tail recursions we see. Don't do this always since it can produce |
| 2330 | sub-optimal code in some cases and we distribute assignments into |
| 2331 | conditional expressions when it would help. */ |
| 2332 | |
| 2333 | if (optimize && retval_rhs != 0 |
| 2334 | && frame_offset == 0 |
| 2335 | && TREE_CODE (retval_rhs) == COND_EXPR |
| 2336 | && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR |
| 2337 | || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR)) |
| 2338 | { |
| 2339 | rtx label = gen_label_rtx (); |
| 2340 | do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX); |
| 2341 | expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl), |
| 2342 | DECL_RESULT (current_function_decl), |
| 2343 | TREE_OPERAND (retval_rhs, 1))); |
| 2344 | emit_label (label); |
| 2345 | expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl), |
| 2346 | DECL_RESULT (current_function_decl), |
| 2347 | TREE_OPERAND (retval_rhs, 2))); |
| 2348 | return; |
| 2349 | } |
| 2350 | |
| 2351 | /* For tail-recursive call to current function, |
| 2352 | just jump back to the beginning. |
| 2353 | It's unsafe if any auto variable in this function |
| 2354 | has its address taken; for simplicity, |
| 2355 | require stack frame to be empty. */ |
| 2356 | if (optimize && retval_rhs != 0 |
| 2357 | && frame_offset == 0 |
| 2358 | && TREE_CODE (retval_rhs) == CALL_EXPR |
| 2359 | && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR |
| 2360 | && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == current_function_decl |
| 2361 | /* Finish checking validity, and if valid emit code |
| 2362 | to set the argument variables for the new call. */ |
| 2363 | && tail_recursion_args (TREE_OPERAND (retval_rhs, 1), |
| 2364 | DECL_ARGUMENTS (current_function_decl))) |
| 2365 | { |
| 2366 | if (tail_recursion_label == 0) |
| 2367 | { |
| 2368 | tail_recursion_label = gen_label_rtx (); |
| 2369 | emit_label_after (tail_recursion_label, |
| 2370 | tail_recursion_reentry); |
| 2371 | } |
| 2372 | emit_queue (); |
| 2373 | expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn); |
| 2374 | emit_barrier (); |
| 2375 | return; |
| 2376 | } |
| 2377 | #ifdef HAVE_return |
| 2378 | /* This optimization is safe if there are local cleanups |
| 2379 | because expand_null_return takes care of them. |
| 2380 | ??? I think it should also be safe when there is a cleanup label, |
| 2381 | because expand_null_return takes care of them, too. |
| 2382 | Any reason why not? */ |
| 2383 | if (HAVE_return && cleanup_label == 0 |
| 2384 | && ! current_function_returns_pcc_struct) |
| 2385 | { |
| 2386 | /* If this is return x == y; then generate |
| 2387 | if (x == y) return 1; else return 0; |
| 2388 | if we can do it with explicit return insns. */ |
| 2389 | if (retval_rhs) |
| 2390 | switch (TREE_CODE (retval_rhs)) |
| 2391 | { |
| 2392 | case EQ_EXPR: |
| 2393 | case NE_EXPR: |
| 2394 | case GT_EXPR: |
| 2395 | case GE_EXPR: |
| 2396 | case LT_EXPR: |
| 2397 | case LE_EXPR: |
| 2398 | case TRUTH_ANDIF_EXPR: |
| 2399 | case TRUTH_ORIF_EXPR: |
| 2400 | case TRUTH_AND_EXPR: |
| 2401 | case TRUTH_OR_EXPR: |
| 2402 | case TRUTH_NOT_EXPR: |
| 2403 | case TRUTH_XOR_EXPR: |
| 2404 | op0 = gen_label_rtx (); |
| 2405 | jumpifnot (retval_rhs, op0); |
| 2406 | expand_value_return (const1_rtx); |
| 2407 | emit_label (op0); |
| 2408 | expand_value_return (const0_rtx); |
| 2409 | return; |
| 2410 | } |
| 2411 | } |
| 2412 | #endif /* HAVE_return */ |
| 2413 | |
| 2414 | if (cleanups |
| 2415 | && retval_rhs != 0 |
| 2416 | && TREE_TYPE (retval_rhs) != void_type_node |
| 2417 | && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG) |
| 2418 | { |
| 2419 | /* Calculate the return value into a pseudo reg. */ |
| 2420 | val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0); |
| 2421 | emit_queue (); |
| 2422 | /* All temporaries have now been used. */ |
| 2423 | free_temp_slots (); |
| 2424 | /* Return the calculated value, doing cleanups first. */ |
| 2425 | expand_value_return (val); |
| 2426 | } |
| 2427 | else |
| 2428 | { |
| 2429 | /* No cleanups or no hard reg used; |
| 2430 | calculate value into hard return reg. */ |
| 2431 | expand_expr (retval, NULL_RTX, VOIDmode, 0); |
| 2432 | emit_queue (); |
| 2433 | free_temp_slots (); |
| 2434 | expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl))); |
| 2435 | } |
| 2436 | } |
| 2437 | |
/* Return 1 if the end of the generated RTL is not a barrier.
| 2439 | This means code already compiled can drop through. */ |
| 2440 | |
| 2441 | int |
| 2442 | drop_through_at_end_p () |
| 2443 | { |
| 2444 | rtx insn = get_last_insn (); |
| 2445 | while (insn && GET_CODE (insn) == NOTE) |
| 2446 | insn = PREV_INSN (insn); |
| 2447 | return insn && GET_CODE (insn) != BARRIER; |
| 2448 | } |
| 2449 | \f |
| 2450 | /* Emit code to alter this function's formal parms for a tail-recursive call. |
| 2451 | ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs). |
| 2452 | FORMALS is the chain of decls of formals. |
| 2453 | Return 1 if this can be done; |
| 2454 | otherwise return 0 and do not emit any code. */ |
| 2455 | |
| 2456 | static int |
| 2457 | tail_recursion_args (actuals, formals) |
| 2458 | tree actuals, formals; |
| 2459 | { |
| 2460 | register tree a = actuals, f = formals; |
| 2461 | register int i; |
| 2462 | register rtx *argvec; |
| 2463 | |
| 2464 | /* Check that number and types of actuals are compatible |
| 2465 | with the formals. This is not always true in valid C code. |
| 2466 | Also check that no formal needs to be addressable |
| 2467 | and that all formals are scalars. */ |
| 2468 | |
| 2469 | /* Also count the args. */ |
| 2470 | |
| 2471 | for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++) |
| 2472 | { |
| 2473 | if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f)) |
| 2474 | return 0; |
| 2475 | if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode) |
| 2476 | return 0; |
| 2477 | } |
| 2478 | if (a != 0 || f != 0) |
| 2479 | return 0; |
| 2480 | |
| 2481 | /* Compute all the actuals. */ |
| 2482 | |
| 2483 | argvec = (rtx *) alloca (i * sizeof (rtx)); |
| 2484 | |
| 2485 | for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++) |
| 2486 | argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0); |
| 2487 | |
| 2488 | /* Find which actual values refer to current values of previous formals. |
| 2489 | Copy each of them now, before any formal is changed. */ |
| 2490 | |
| 2491 | for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++) |
| 2492 | { |
| 2493 | int copy = 0; |
| 2494 | register int j; |
| 2495 | for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++) |
| 2496 | if (reg_mentioned_p (DECL_RTL (f), argvec[i])) |
| 2497 | { copy = 1; break; } |
| 2498 | if (copy) |
| 2499 | argvec[i] = copy_to_reg (argvec[i]); |
| 2500 | } |
| 2501 | |
| 2502 | /* Store the values of the actuals into the formals. */ |
| 2503 | |
| 2504 | for (f = formals, a = actuals, i = 0; f; |
| 2505 | f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++) |
| 2506 | { |
| 2507 | if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i])) |
| 2508 | emit_move_insn (DECL_RTL (f), argvec[i]); |
| 2509 | else |
| 2510 | convert_move (DECL_RTL (f), argvec[i], |
| 2511 | TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)))); |
| 2512 | } |
| 2513 | |
| 2514 | free_temp_slots (); |
| 2515 | return 1; |
| 2516 | } |
| 2517 | \f |
| 2518 | /* Generate the RTL code for entering a binding contour. |
| 2519 | The variables are declared one by one, by calls to `expand_decl'. |
| 2520 | |
| 2521 | EXIT_FLAG is nonzero if this construct should be visible to |
| 2522 | `exit_something'. */ |
| 2523 | |
| 2524 | void |
| 2525 | expand_start_bindings (exit_flag) |
| 2526 | int exit_flag; |
| 2527 | { |
| 2528 | struct nesting *thisblock = ALLOC_NESTING (); |
| 2529 | |
| 2530 | rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG); |
| 2531 | |
| 2532 | /* Make an entry on block_stack for the block we are entering. */ |
| 2533 | |
| 2534 | thisblock->next = block_stack; |
| 2535 | thisblock->all = nesting_stack; |
| 2536 | thisblock->depth = ++nesting_depth; |
| 2537 | thisblock->data.block.stack_level = 0; |
| 2538 | thisblock->data.block.cleanups = 0; |
| 2539 | thisblock->data.block.function_call_count = 0; |
| 2540 | #if 0 |
| 2541 | if (block_stack) |
| 2542 | { |
| 2543 | if (block_stack->data.block.cleanups == NULL_TREE |
| 2544 | && (block_stack->data.block.outer_cleanups == NULL_TREE |
| 2545 | || block_stack->data.block.outer_cleanups == empty_cleanup_list)) |
| 2546 | thisblock->data.block.outer_cleanups = empty_cleanup_list; |
| 2547 | else |
| 2548 | thisblock->data.block.outer_cleanups |
| 2549 | = tree_cons (NULL_TREE, block_stack->data.block.cleanups, |
| 2550 | block_stack->data.block.outer_cleanups); |
| 2551 | } |
| 2552 | else |
| 2553 | thisblock->data.block.outer_cleanups = 0; |
| 2554 | #endif |
| 2555 | #if 1 |
| 2556 | if (block_stack |
| 2557 | && !(block_stack->data.block.cleanups == NULL_TREE |
| 2558 | && block_stack->data.block.outer_cleanups == NULL_TREE)) |
| 2559 | thisblock->data.block.outer_cleanups |
| 2560 | = tree_cons (NULL_TREE, block_stack->data.block.cleanups, |
| 2561 | block_stack->data.block.outer_cleanups); |
| 2562 | else |
| 2563 | thisblock->data.block.outer_cleanups = 0; |
| 2564 | #endif |
| 2565 | thisblock->data.block.label_chain = 0; |
| 2566 | thisblock->data.block.innermost_stack_block = stack_block_stack; |
| 2567 | thisblock->data.block.first_insn = note; |
| 2568 | thisblock->data.block.block_start_count = ++block_start_count; |
| 2569 | thisblock->exit_label = exit_flag ? gen_label_rtx () : 0; |
| 2570 | block_stack = thisblock; |
| 2571 | nesting_stack = thisblock; |
| 2572 | |
| 2573 | /* Make a new level for allocating stack slots. */ |
| 2574 | push_temp_slots (); |
| 2575 | } |
| 2576 | |
| 2577 | /* Given a pointer to a BLOCK node, save a pointer to the most recently |
| 2578 | generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given |
| 2579 | BLOCK node. */ |
| 2580 | |
| 2581 | void |
| 2582 | remember_end_note (block) |
| 2583 | register tree block; |
| 2584 | { |
| 2585 | BLOCK_END_NOTE (block) = last_block_end_note; |
| 2586 | last_block_end_note = NULL_RTX; |
| 2587 | } |
| 2588 | |
| 2589 | /* Generate RTL code to terminate a binding contour. |
| 2590 | VARS is the chain of VAR_DECL nodes |
| 2591 | for the variables bound in this contour. |
| 2592 | MARK_ENDS is nonzero if we should put a note at the beginning |
| 2593 | and end of this binding contour. |
| 2594 | |
| 2595 | DONT_JUMP_IN is nonzero if it is not valid to jump into this contour. |
| 2596 | (That is true automatically if the contour has a saved stack level.) */ |
| 2597 | |
| 2598 | void |
| 2599 | expand_end_bindings (vars, mark_ends, dont_jump_in) |
| 2600 | tree vars; |
| 2601 | int mark_ends; |
| 2602 | int dont_jump_in; |
| 2603 | { |
| 2604 | register struct nesting *thisblock = block_stack; |
| 2605 | register tree decl; |
| 2606 | |
| 2607 | if (warn_unused) |
| 2608 | for (decl = vars; decl; decl = TREE_CHAIN (decl)) |
| 2609 | if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL |
| 2610 | && ! DECL_IN_SYSTEM_HEADER (decl)) |
| 2611 | warning_with_decl (decl, "unused variable `%s'"); |
| 2612 | |
| 2613 | if (thisblock->exit_label) |
| 2614 | { |
| 2615 | do_pending_stack_adjust (); |
| 2616 | emit_label (thisblock->exit_label); |
| 2617 | } |
| 2618 | |
| 2619 | /* If necessary, make a handler for nonlocal gotos taking |
| 2620 | place in the function calls in this block. */ |
| 2621 | if (function_call_count != thisblock->data.block.function_call_count |
| 2622 | && nonlocal_labels |
| 2623 | /* Make handler for outermost block |
| 2624 | if there were any nonlocal gotos to this function. */ |
| 2625 | && (thisblock->next == 0 ? current_function_has_nonlocal_label |
| 2626 | /* Make handler for inner block if it has something |
| 2627 | special to do when you jump out of it. */ |
| 2628 | : (thisblock->data.block.cleanups != 0 |
| 2629 | || thisblock->data.block.stack_level != 0))) |
| 2630 | { |
| 2631 | tree link; |
| 2632 | rtx afterward = gen_label_rtx (); |
| 2633 | rtx handler_label = gen_label_rtx (); |
| 2634 | rtx save_receiver = gen_reg_rtx (Pmode); |
| 2635 | |
| 2636 | /* Don't let jump_optimize delete the handler. */ |
| 2637 | LABEL_PRESERVE_P (handler_label) = 1; |
| 2638 | |
| 2639 | /* Record the handler address in the stack slot for that purpose, |
| 2640 | during this block, saving and restoring the outer value. */ |
| 2641 | if (thisblock->next != 0) |
| 2642 | { |
| 2643 | emit_move_insn (nonlocal_goto_handler_slot, save_receiver); |
| 2644 | emit_insn_before (gen_move_insn (save_receiver, |
| 2645 | nonlocal_goto_handler_slot), |
| 2646 | thisblock->data.block.first_insn); |
| 2647 | } |
| 2648 | emit_insn_before (gen_move_insn (nonlocal_goto_handler_slot, |
| 2649 | gen_rtx (LABEL_REF, Pmode, |
| 2650 | handler_label)), |
| 2651 | thisblock->data.block.first_insn); |
| 2652 | |
| 2653 | /* Jump around the handler; it runs only when specially invoked. */ |
| 2654 | emit_jump (afterward); |
| 2655 | emit_label (handler_label); |
| 2656 | |
| 2657 | #ifdef HAVE_nonlocal_goto |
| 2658 | if (! HAVE_nonlocal_goto) |
| 2659 | #endif |
| 2660 | /* First adjust our frame pointer to its actual value. It was |
| 2661 | previously set to the start of the virtual area corresponding to |
| 2662 | the stacked variables when we branched here and now needs to be |
| 2663 | adjusted to the actual hardware fp value. |
| 2664 | |
	   Assignments to virtual registers are converted by
| 2666 | instantiate_virtual_regs into the corresponding assignment |
| 2667 | to the underlying register (fp in this case) that makes |
| 2668 | the original assignment true. |
| 2669 | So the following insn will actually be |
| 2670 | decrementing fp by STARTING_FRAME_OFFSET. */ |
| 2671 | emit_move_insn (virtual_stack_vars_rtx, frame_pointer_rtx); |
| 2672 | |
| 2673 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM |
| 2674 | if (fixed_regs[ARG_POINTER_REGNUM]) |
| 2675 | { |
| 2676 | #ifdef ELIMINABLE_REGS |
| 2677 | /* If the argument pointer can be eliminated in favor of the |
| 2678 | frame pointer, we don't need to restore it. We assume here |
| 2679 | that if such an elimination is present, it can always be used. |
| 2680 | This is the case on all known machines; if we don't make this |
| 2681 | assumption, we do unnecessary saving on many machines. */ |
| 2682 | static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS; |
| 2683 | int i; |
| 2684 | |
| 2685 | for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++) |
| 2686 | if (elim_regs[i].from == ARG_POINTER_REGNUM |
| 2687 | && elim_regs[i].to == FRAME_POINTER_REGNUM) |
| 2688 | break; |
| 2689 | |
| 2690 | if (i == sizeof elim_regs / sizeof elim_regs [0]) |
| 2691 | #endif |
| 2692 | { |
| 2693 | /* Now restore our arg pointer from the address at which it |
| 2694 | was saved in our stack frame. |
		 If space hasn't been allocated for it yet, make
| 2696 | some now. */ |
| 2697 | if (arg_pointer_save_area == 0) |
| 2698 | arg_pointer_save_area |
| 2699 | = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0); |
| 2700 | emit_move_insn (virtual_incoming_args_rtx, |
| 2701 | /* We need a pseudo here, or else |
| 2702 | instantiate_virtual_regs_1 complains. */ |
| 2703 | copy_to_reg (arg_pointer_save_area)); |
| 2704 | } |
| 2705 | } |
| 2706 | #endif |
| 2707 | |
| 2708 | /* The handler expects the desired label address in the static chain |
| 2709 | register. It tests the address and does an appropriate jump |
| 2710 | to whatever label is desired. */ |
| 2711 | for (link = nonlocal_labels; link; link = TREE_CHAIN (link)) |
| 2712 | /* Skip any labels we shouldn't be able to jump to from here. */ |
| 2713 | if (! DECL_TOO_LATE (TREE_VALUE (link))) |
| 2714 | { |
| 2715 | rtx not_this = gen_label_rtx (); |
| 2716 | rtx this = gen_label_rtx (); |
| 2717 | do_jump_if_equal (static_chain_rtx, |
| 2718 | gen_rtx (LABEL_REF, Pmode, DECL_RTL (TREE_VALUE (link))), |
| 2719 | this, 0); |
| 2720 | emit_jump (not_this); |
| 2721 | emit_label (this); |
| 2722 | expand_goto (TREE_VALUE (link)); |
| 2723 | emit_label (not_this); |
| 2724 | } |
| 2725 | /* If label is not recognized, abort. */ |
| 2726 | emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "abort"), 0, |
| 2727 | VOIDmode, 0); |
| 2728 | emit_label (afterward); |
| 2729 | } |
| 2730 | |
| 2731 | /* Don't allow jumping into a block that has cleanups or a stack level. */ |
| 2732 | if (dont_jump_in |
| 2733 | || thisblock->data.block.stack_level != 0 |
| 2734 | || thisblock->data.block.cleanups != 0) |
| 2735 | { |
| 2736 | struct label_chain *chain; |
| 2737 | |
| 2738 | /* Any labels in this block are no longer valid to go to. |
| 2739 | Mark them to cause an error message. */ |
| 2740 | for (chain = thisblock->data.block.label_chain; chain; chain = chain->next) |
| 2741 | { |
| 2742 | DECL_TOO_LATE (chain->label) = 1; |
| 2743 | /* If any goto without a fixup came to this label, |
| 2744 | that must be an error, because gotos without fixups |
| 2745 | come from outside all saved stack-levels and all cleanups. */ |
| 2746 | if (TREE_ADDRESSABLE (chain->label)) |
| 2747 | error_with_decl (chain->label, |
| 2748 | "label `%s' used before containing binding contour"); |
| 2749 | } |
| 2750 | } |
| 2751 | |
| 2752 | /* Restore stack level in effect before the block |
| 2753 | (only if variable-size objects allocated). */ |
| 2754 | /* Perform any cleanups associated with the block. */ |
| 2755 | |
| 2756 | if (thisblock->data.block.stack_level != 0 |
| 2757 | || thisblock->data.block.cleanups != 0) |
| 2758 | { |
| 2759 | /* Don't let cleanups affect ({...}) constructs. */ |
| 2760 | int old_expr_stmts_for_value = expr_stmts_for_value; |
| 2761 | rtx old_last_expr_value = last_expr_value; |
| 2762 | tree old_last_expr_type = last_expr_type; |
| 2763 | expr_stmts_for_value = 0; |
| 2764 | |
| 2765 | /* Do the cleanups. */ |
| 2766 | expand_cleanups (thisblock->data.block.cleanups, NULL_TREE); |
| 2767 | do_pending_stack_adjust (); |
| 2768 | |
| 2769 | expr_stmts_for_value = old_expr_stmts_for_value; |
| 2770 | last_expr_value = old_last_expr_value; |
| 2771 | last_expr_type = old_last_expr_type; |
| 2772 | |
| 2773 | /* Restore the stack level. */ |
| 2774 | |
| 2775 | if (thisblock->data.block.stack_level != 0) |
| 2776 | { |
| 2777 | emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION, |
| 2778 | thisblock->data.block.stack_level, NULL_RTX); |
| 2779 | if (nonlocal_goto_handler_slot != 0) |
| 2780 | emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, |
| 2781 | NULL_RTX); |
| 2782 | } |
| 2783 | |
| 2784 | /* Any gotos out of this block must also do these things. |
| 2785 | Also report any gotos with fixups that came to labels in this |
| 2786 | level. */ |
| 2787 | fixup_gotos (thisblock, |
| 2788 | thisblock->data.block.stack_level, |
| 2789 | thisblock->data.block.cleanups, |
| 2790 | thisblock->data.block.first_insn, |
| 2791 | dont_jump_in); |
| 2792 | } |
| 2793 | |
| 2794 | /* Mark the beginning and end of the scope if requested. |
| 2795 | We do this now, after running cleanups on the variables |
| 2796 | just going out of scope, so they are in scope for their cleanups. */ |
| 2797 | |
| 2798 | if (mark_ends) |
| 2799 | last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END); |
| 2800 | else |
| 2801 | /* Get rid of the beginning-mark if we don't make an end-mark. */ |
| 2802 | NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED; |
| 2803 | |
| 2804 | /* If doing stupid register allocation, make sure lives of all |
| 2805 | register variables declared here extend thru end of scope. */ |
| 2806 | |
| 2807 | if (obey_regdecls) |
| 2808 | for (decl = vars; decl; decl = TREE_CHAIN (decl)) |
| 2809 | { |
| 2810 | rtx rtl = DECL_RTL (decl); |
| 2811 | if (TREE_CODE (decl) == VAR_DECL && rtl != 0) |
| 2812 | use_variable (rtl); |
| 2813 | } |
| 2814 | |
| 2815 | /* Restore block_stack level for containing block. */ |
| 2816 | |
| 2817 | stack_block_stack = thisblock->data.block.innermost_stack_block; |
| 2818 | POPSTACK (block_stack); |
| 2819 | |
| 2820 | /* Pop the stack slot nesting and free any slots at this level. */ |
| 2821 | pop_temp_slots (); |
| 2822 | } |
| 2823 | \f |
| 2824 | /* Generate RTL for the automatic variable declaration DECL. |
| 2825 | (Other kinds of declarations are simply ignored if seen here.) |
| 2826 | CLEANUP is an expression to be executed at exit from this binding contour; |
| 2827 | for example, in C++, it might call the destructor for this variable. |
| 2828 | |
| 2829 | If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them |
| 2830 | either before or after calling `expand_decl' but before compiling |
| 2831 | any subsequent expressions. This is because CLEANUP may be expanded |
| 2832 | more than once, on different branches of execution. |
| 2833 | For the same reason, CLEANUP may not contain a CALL_EXPR |
| 2834 | except as its topmost node--else `preexpand_calls' would get confused. |
| 2835 | |
| 2836 | If CLEANUP is nonzero and DECL is zero, we record a cleanup |
| 2837 | that is not associated with any particular variable. |
| 2838 | |
| 2839 | There is no special support here for C++ constructors. |
| 2840 | They should be handled by the proper code in DECL_INITIAL. */ |
| 2841 | |
| 2842 | void |
| 2843 | expand_decl (decl) |
| 2844 | register tree decl; |
| 2845 | { |
| 2846 | struct nesting *thisblock = block_stack; |
| 2847 | tree type = TREE_TYPE (decl); |
| 2848 | |
| 2849 | /* Only automatic variables need any expansion done. |
| 2850 | Static and external variables, and external functions, |
| 2851 | will be handled by `assemble_variable' (called from finish_decl). |
| 2852 | TYPE_DECL and CONST_DECL require nothing. |
| 2853 | PARM_DECLs are handled in `assign_parms'. */ |
| 2854 | |
| 2855 | if (TREE_CODE (decl) != VAR_DECL) |
| 2856 | return; |
| 2857 | if (TREE_STATIC (decl) || DECL_EXTERNAL (decl)) |
| 2858 | return; |
| 2859 | |
| 2860 | /* Create the RTL representation for the variable. */ |
| 2861 | |
| 2862 | if (type == error_mark_node) |
| 2863 | DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx); |
| 2864 | else if (DECL_SIZE (decl) == 0) |
| 2865 | /* Variable with incomplete type. */ |
| 2866 | { |
| 2867 | if (DECL_INITIAL (decl) == 0) |
| 2868 | /* Error message was already done; now avoid a crash. */ |
| 2869 | DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1); |
| 2870 | else |
| 2871 | /* An initializer is going to decide the size of this array. |
| 2872 | Until we know the size, represent its address with a reg. */ |
| 2873 | DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode)); |
| 2874 | } |
| 2875 | else if (DECL_MODE (decl) != BLKmode |
| 2876 | /* If -ffloat-store, don't put explicit float vars |
| 2877 | into regs. */ |
| 2878 | && !(flag_float_store |
| 2879 | && TREE_CODE (type) == REAL_TYPE) |
| 2880 | && ! TREE_THIS_VOLATILE (decl) |
| 2881 | && ! TREE_ADDRESSABLE (decl) |
| 2882 | && (DECL_REGISTER (decl) || ! obey_regdecls)) |
| 2883 | { |
| 2884 | /* Automatic variable that can go in a register. */ |
| 2885 | enum machine_mode reg_mode = DECL_MODE (decl); |
| 2886 | int unsignedp = TREE_UNSIGNED (type); |
| 2887 | |
| 2888 | if (TREE_CODE (type) == INTEGER_TYPE || TREE_CODE (type) == ENUMERAL_TYPE |
| 2889 | || TREE_CODE (type) == BOOLEAN_TYPE || TREE_CODE (type) == CHAR_TYPE |
| 2890 | || TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == POINTER_TYPE |
| 2891 | || TREE_CODE (type) == OFFSET_TYPE) |
| 2892 | { |
| 2893 | PROMOTE_MODE (reg_mode, unsignedp, type); |
| 2894 | } |
| 2895 | |
| 2896 | DECL_RTL (decl) = gen_reg_rtx (reg_mode); |
| 2897 | if (TREE_CODE (type) == POINTER_TYPE) |
| 2898 | mark_reg_pointer (DECL_RTL (decl)); |
| 2899 | REG_USERVAR_P (DECL_RTL (decl)) = 1; |
| 2900 | } |
| 2901 | else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST) |
| 2902 | { |
| 2903 | /* Variable of fixed size that goes on the stack. */ |
| 2904 | rtx oldaddr = 0; |
| 2905 | rtx addr; |
| 2906 | |
| 2907 | /* If we previously made RTL for this decl, it must be an array |
| 2908 | whose size was determined by the initializer. |
| 2909 | The old address was a register; set that register now |
| 2910 | to the proper address. */ |
| 2911 | if (DECL_RTL (decl) != 0) |
| 2912 | { |
| 2913 | if (GET_CODE (DECL_RTL (decl)) != MEM |
| 2914 | || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG) |
| 2915 | abort (); |
| 2916 | oldaddr = XEXP (DECL_RTL (decl), 0); |
| 2917 | } |
| 2918 | |
| 2919 | DECL_RTL (decl) |
| 2920 | = assign_stack_temp (DECL_MODE (decl), |
| 2921 | ((TREE_INT_CST_LOW (DECL_SIZE (decl)) |
| 2922 | + BITS_PER_UNIT - 1) |
| 2923 | / BITS_PER_UNIT), |
| 2924 | 1); |
| 2925 | |
| 2926 | /* Set alignment we actually gave this decl. */ |
| 2927 | DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT |
| 2928 | : GET_MODE_BITSIZE (DECL_MODE (decl))); |
| 2929 | |
| 2930 | if (oldaddr) |
| 2931 | { |
| 2932 | addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr); |
| 2933 | if (addr != oldaddr) |
| 2934 | emit_move_insn (oldaddr, addr); |
| 2935 | } |
| 2936 | |
| 2937 | /* If this is a memory ref that contains aggregate components, |
| 2938 | mark it as such for cse and loop optimize. */ |
| 2939 | MEM_IN_STRUCT_P (DECL_RTL (decl)) |
| 2940 | = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE |
| 2941 | || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE |
| 2942 | || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE |
| 2943 | || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE); |
| 2944 | #if 0 |
| 2945 | /* If this is in memory because of -ffloat-store, |
| 2946 | set the volatile bit, to prevent optimizations from |
| 2947 | undoing the effects. */ |
| 2948 | if (flag_float_store && TREE_CODE (type) == REAL_TYPE) |
| 2949 | MEM_VOLATILE_P (DECL_RTL (decl)) = 1; |
| 2950 | #endif |
| 2951 | } |
| 2952 | else |
| 2953 | /* Dynamic-size object: must push space on the stack. */ |
| 2954 | { |
| 2955 | rtx address, size; |
| 2956 | |
      /* Record the stack pointer on entry to the block, if we have
	 not already done so.  */
| 2959 | if (thisblock->data.block.stack_level == 0) |
| 2960 | { |
| 2961 | do_pending_stack_adjust (); |
| 2962 | emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION, |
| 2963 | &thisblock->data.block.stack_level, |
| 2964 | thisblock->data.block.first_insn); |
| 2965 | stack_block_stack = thisblock; |
| 2966 | } |
| 2967 | |
| 2968 | /* Compute the variable's size, in bytes. */ |
| 2969 | size = expand_expr (size_binop (CEIL_DIV_EXPR, |
| 2970 | DECL_SIZE (decl), |
| 2971 | size_int (BITS_PER_UNIT)), |
| 2972 | NULL_RTX, VOIDmode, 0); |
| 2973 | free_temp_slots (); |
| 2974 | |
| 2975 | /* This is equivalent to calling alloca. */ |
| 2976 | current_function_calls_alloca = 1; |
| 2977 | |
| 2978 | /* Allocate space on the stack for the variable. */ |
| 2979 | address = allocate_dynamic_stack_space (size, NULL_RTX, |
| 2980 | DECL_ALIGN (decl)); |
| 2981 | |
| 2982 | if (nonlocal_goto_handler_slot != 0) |
| 2983 | emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX); |
| 2984 | |
      /* Reference the variable indirectly through that rtx.  */
| 2986 | DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address); |
| 2987 | |
| 2988 | /* If this is a memory ref that contains aggregate components, |
| 2989 | mark it as such for cse and loop optimize. */ |
| 2990 | MEM_IN_STRUCT_P (DECL_RTL (decl)) |
| 2991 | = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE |
| 2992 | || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE |
| 2993 | || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE |
| 2994 | || TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE); |
| 2995 | |
| 2996 | /* Indicate the alignment we actually gave this variable. */ |
| 2997 | #ifdef STACK_BOUNDARY |
| 2998 | DECL_ALIGN (decl) = STACK_BOUNDARY; |
| 2999 | #else |
| 3000 | DECL_ALIGN (decl) = BIGGEST_ALIGNMENT; |
| 3001 | #endif |
| 3002 | } |
| 3003 | |
| 3004 | if (TREE_THIS_VOLATILE (decl)) |
| 3005 | MEM_VOLATILE_P (DECL_RTL (decl)) = 1; |
| 3006 | #if 0 /* A variable is not necessarily unchanging |
| 3007 | just because it is const. RTX_UNCHANGING_P |
| 3008 | means no change in the function, |
| 3009 | not merely no change in the variable's scope. |
| 3010 | It is correct to set RTX_UNCHANGING_P if the variable's scope |
| 3011 | is the whole function. There's no convenient way to test that. */ |
| 3012 | if (TREE_READONLY (decl)) |
| 3013 | RTX_UNCHANGING_P (DECL_RTL (decl)) = 1; |
| 3014 | #endif |
| 3015 | |
| 3016 | /* If doing stupid register allocation, make sure life of any |
| 3017 | register variable starts here, at the start of its scope. */ |
| 3018 | |
| 3019 | if (obey_regdecls) |
| 3020 | use_variable (DECL_RTL (decl)); |
| 3021 | } |
| 3022 | \f |
| 3023 | /* Emit code to perform the initialization of a declaration DECL. */ |
| 3024 | |
| 3025 | void |
| 3026 | expand_decl_init (decl) |
| 3027 | tree decl; |
| 3028 | { |
| 3029 | int was_used = TREE_USED (decl); |
| 3030 | |
| 3031 | if (TREE_STATIC (decl)) |
| 3032 | return; |
| 3033 | |
| 3034 | /* Compute and store the initial value now. */ |
| 3035 | |
| 3036 | if (DECL_INITIAL (decl) == error_mark_node) |
| 3037 | { |
| 3038 | enum tree_code code = TREE_CODE (TREE_TYPE (decl)); |
| 3039 | if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE |
| 3040 | || code == POINTER_TYPE) |
| 3041 | expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node), |
| 3042 | 0, 0); |
| 3043 | emit_queue (); |
| 3044 | } |
| 3045 | else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST) |
| 3046 | { |
| 3047 | emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl)); |
| 3048 | expand_assignment (decl, DECL_INITIAL (decl), 0, 0); |
| 3049 | emit_queue (); |
| 3050 | } |
| 3051 | |
| 3052 | /* Don't let the initialization count as "using" the variable. */ |
| 3053 | TREE_USED (decl) = was_used; |
| 3054 | |
| 3055 | /* Free any temporaries we made while initializing the decl. */ |
| 3056 | free_temp_slots (); |
| 3057 | } |
| 3058 | |
/* Record CLEANUP, an expression to be executed at exit from the current
   binding contour; for example, in C++, it might call the destructor
   for DECL.

   If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them
   either before or after calling `expand_decl' but before compiling
   any subsequent expressions.  This is because CLEANUP may be expanded
   more than once, on different branches of execution.
   For the same reason, CLEANUP may not contain a CALL_EXPR
   except as its topmost node--else `preexpand_calls' would get confused.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.

   Return 0 if we are not inside any binding contour (nothing is
   recorded in that case); return 1 on success.  */
| 3071 | |
| 3072 | int |
| 3073 | expand_decl_cleanup (decl, cleanup) |
| 3074 | tree decl, cleanup; |
| 3075 | { |
| 3076 | struct nesting *thisblock = block_stack; |
| 3077 | |
| 3078 | /* Error if we are not in any block. */ |
| 3079 | if (thisblock == 0) |
| 3080 | return 0; |
| 3081 | |
| 3082 | /* Record the cleanup if there is one. */ |
| 3083 | |
| 3084 | if (cleanup != 0) |
| 3085 | { |
| 3086 | thisblock->data.block.cleanups |
| 3087 | = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups); |
| 3088 | /* If this block has a cleanup, it belongs in stack_block_stack. */ |
| 3089 | stack_block_stack = thisblock; |
| 3090 | } |
| 3091 | return 1; |
| 3092 | } |
| 3093 | \f |
| 3094 | /* DECL is an anonymous union. CLEANUP is a cleanup for DECL. |
| 3095 | DECL_ELTS is the list of elements that belong to DECL's type. |
| 3096 | In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */ |
| 3097 | |
| 3098 | void |
| 3099 | expand_anon_union_decl (decl, cleanup, decl_elts) |
| 3100 | tree decl, cleanup, decl_elts; |
| 3101 | { |
| 3102 | struct nesting *thisblock = block_stack; |
| 3103 | rtx x; |
| 3104 | |
  expand_decl (decl);
  expand_decl_cleanup (decl, cleanup);
| 3106 | x = DECL_RTL (decl); |
| 3107 | |
| 3108 | while (decl_elts) |
| 3109 | { |
| 3110 | tree decl_elt = TREE_VALUE (decl_elts); |
| 3111 | tree cleanup_elt = TREE_PURPOSE (decl_elts); |
| 3112 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt)); |
| 3113 | |
| 3114 | /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we |
| 3115 | instead create a new MEM rtx with the proper mode. */ |
| 3116 | if (GET_CODE (x) == MEM) |
| 3117 | { |
| 3118 | if (mode == GET_MODE (x)) |
| 3119 | DECL_RTL (decl_elt) = x; |
| 3120 | else |
| 3121 | { |
| 3122 | DECL_RTL (decl_elt) = gen_rtx (MEM, mode, copy_rtx (XEXP (x, 0))); |
| 3123 | MEM_IN_STRUCT_P (DECL_RTL (decl_elt)) = MEM_IN_STRUCT_P (x); |
| 3124 | RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x); |
| 3125 | } |
| 3126 | } |
| 3127 | else if (GET_CODE (x) == REG) |
| 3128 | { |
| 3129 | if (mode == GET_MODE (x)) |
| 3130 | DECL_RTL (decl_elt) = x; |
| 3131 | else |
| 3132 | DECL_RTL (decl_elt) = gen_rtx (SUBREG, mode, x, 0); |
| 3133 | } |
| 3134 | else |
| 3135 | abort (); |
| 3136 | |
| 3137 | /* Record the cleanup if there is one. */ |
| 3138 | |
| 3139 | if (cleanup != 0) |
| 3140 | thisblock->data.block.cleanups |
| 3141 | = temp_tree_cons (decl_elt, cleanup_elt, |
| 3142 | thisblock->data.block.cleanups); |
| 3143 | |
| 3144 | decl_elts = TREE_CHAIN (decl_elts); |
| 3145 | } |
| 3146 | } |
| 3147 | \f |
| 3148 | /* Expand a list of cleanups LIST. |
| 3149 | Elements may be expressions or may be nested lists. |
| 3150 | |
| 3151 | If DONT_DO is nonnull, then any list-element |
| 3152 | whose TREE_PURPOSE matches DONT_DO is omitted. |
| 3153 | This is sometimes used to avoid a cleanup associated with |
| 3154 | a value that is being returned out of the scope. */ |
| 3155 | |
| 3156 | static void |
| 3157 | expand_cleanups (list, dont_do) |
| 3158 | tree list; |
| 3159 | tree dont_do; |
| 3160 | { |
| 3161 | tree tail; |
| 3162 | for (tail = list; tail; tail = TREE_CHAIN (tail)) |
| 3163 | if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do) |
| 3164 | { |
| 3165 | if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST) |
| 3166 | expand_cleanups (TREE_VALUE (tail), dont_do); |
| 3167 | else |
| 3168 | { |
| 3169 | /* Cleanups may be run multiple times. For example, |
| 3170 | when exiting a binding contour, we expand the |
| 3171 | cleanups associated with that contour. When a goto |
| 3172 | within that binding contour has a target outside that |
| 3173 | contour, it will expand all cleanups from its scope to |
| 3174 | the target. Though the cleanups are expanded multiple |
| 3175 | times, the control paths are non-overlapping so the |
| 3176 | cleanups will not be executed twice. */ |
| 3177 | expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0); |
| 3178 | free_temp_slots (); |
| 3179 | } |
| 3180 | } |
| 3181 | } |
| 3182 | |
| 3183 | /* Move all cleanups from the current block_stack |
| 3184 | to the containing block_stack, where they are assumed to |
| 3185 | have been created. If anything can cause a temporary to |
| 3186 | be created, but not expanded for more than one level of |
| 3187 | block_stacks, then this code will have to change. */ |
| 3188 | |
| 3189 | void |
| 3190 | move_cleanups_up () |
| 3191 | { |
| 3192 | struct nesting *block = block_stack; |
| 3193 | struct nesting *outer = block->next; |
| 3194 | |
| 3195 | outer->data.block.cleanups |
| 3196 | = chainon (block->data.block.cleanups, |
| 3197 | outer->data.block.cleanups); |
| 3198 | block->data.block.cleanups = 0; |
| 3199 | } |
| 3200 | |
| 3201 | tree |
| 3202 | last_cleanup_this_contour () |
| 3203 | { |
| 3204 | if (block_stack == 0) |
| 3205 | return 0; |
| 3206 | |
| 3207 | return block_stack->data.block.cleanups; |
| 3208 | } |
| 3209 | |
| 3210 | /* Return 1 if there are any pending cleanups at this point. |
| 3211 | If THIS_CONTOUR is nonzero, check the current contour as well. |
| 3212 | Otherwise, look only at the contours that enclose this one. */ |
| 3213 | |
| 3214 | int |
| 3215 | any_pending_cleanups (this_contour) |
| 3216 | int this_contour; |
| 3217 | { |
| 3218 | struct nesting *block; |
| 3219 | |
| 3220 | if (block_stack == 0) |
| 3221 | return 0; |
| 3222 | |
| 3223 | if (this_contour && block_stack->data.block.cleanups != NULL) |
| 3224 | return 1; |
| 3225 | if (block_stack->data.block.cleanups == 0 |
| 3226 | && (block_stack->data.block.outer_cleanups == 0 |
| 3227 | #if 0 |
| 3228 | || block_stack->data.block.outer_cleanups == empty_cleanup_list |
| 3229 | #endif |
| 3230 | )) |
| 3231 | return 0; |
| 3232 | |
| 3233 | for (block = block_stack->next; block; block = block->next) |
| 3234 | if (block->data.block.cleanups != 0) |
| 3235 | return 1; |
| 3236 | |
| 3237 | return 0; |
| 3238 | } |
| 3239 | \f |
| 3240 | /* Enter a case (Pascal) or switch (C) statement. |
| 3241 | Push a block onto case_stack and nesting_stack |
| 3242 | to accumulate the case-labels that are seen |
| 3243 | and to record the labels generated for the statement. |
| 3244 | |
| 3245 | EXIT_FLAG is nonzero if `exit_something' should exit this case stmt. |
| 3246 | Otherwise, this construct is transparent for `exit_something'. |
| 3247 | |
| 3248 | EXPR is the index-expression to be dispatched on. |
| 3249 | TYPE is its nominal type. We could simply convert EXPR to this type, |
| 3250 | but instead we take short cuts. */ |
| 3251 | |
| 3252 | void |
| 3253 | expand_start_case (exit_flag, expr, type, printname) |
| 3254 | int exit_flag; |
| 3255 | tree expr; |
| 3256 | tree type; |
| 3257 | char *printname; |
| 3258 | { |
| 3259 | register struct nesting *thiscase = ALLOC_NESTING (); |
| 3260 | |
| 3261 | /* Make an entry on case_stack for the case we are entering. */ |
| 3262 | |
| 3263 | thiscase->next = case_stack; |
| 3264 | thiscase->all = nesting_stack; |
| 3265 | thiscase->depth = ++nesting_depth; |
| 3266 | thiscase->exit_label = exit_flag ? gen_label_rtx () : 0; |
| 3267 | thiscase->data.case_stmt.case_list = 0; |
| 3268 | thiscase->data.case_stmt.index_expr = expr; |
| 3269 | thiscase->data.case_stmt.nominal_type = type; |
| 3270 | thiscase->data.case_stmt.default_label = 0; |
| 3271 | thiscase->data.case_stmt.num_ranges = 0; |
| 3272 | thiscase->data.case_stmt.printname = printname; |
| 3273 | thiscase->data.case_stmt.seenlabel = 0; |
| 3274 | case_stack = thiscase; |
| 3275 | nesting_stack = thiscase; |
| 3276 | |
| 3277 | do_pending_stack_adjust (); |
| 3278 | |
| 3279 | /* Make sure case_stmt.start points to something that won't |
| 3280 | need any transformation before expand_end_case. */ |
| 3281 | if (GET_CODE (get_last_insn ()) != NOTE) |
| 3282 | emit_note (NULL_PTR, NOTE_INSN_DELETED); |
| 3283 | |
| 3284 | thiscase->data.case_stmt.start = get_last_insn (); |
| 3285 | } |
| 3286 | |
| 3287 | /* Start a "dummy case statement" within which case labels are invalid |
| 3288 | and are not connected to any larger real case statement. |
| 3289 | This can be used if you don't want to let a case statement jump |
| 3290 | into the middle of certain kinds of constructs. */ |
| 3291 | |
| 3292 | void |
| 3293 | expand_start_case_dummy () |
| 3294 | { |
| 3295 | register struct nesting *thiscase = ALLOC_NESTING (); |
| 3296 | |
| 3297 | /* Make an entry on case_stack for the dummy. */ |
| 3298 | |
| 3299 | thiscase->next = case_stack; |
| 3300 | thiscase->all = nesting_stack; |
| 3301 | thiscase->depth = ++nesting_depth; |
| 3302 | thiscase->exit_label = 0; |
| 3303 | thiscase->data.case_stmt.case_list = 0; |
| 3304 | thiscase->data.case_stmt.start = 0; |
| 3305 | thiscase->data.case_stmt.nominal_type = 0; |
| 3306 | thiscase->data.case_stmt.default_label = 0; |
| 3307 | thiscase->data.case_stmt.num_ranges = 0; |
| 3308 | case_stack = thiscase; |
| 3309 | nesting_stack = thiscase; |
| 3310 | } |
| 3311 | |
| 3312 | /* End a dummy case statement. */ |
| 3313 | |
| 3314 | void |
| 3315 | expand_end_case_dummy () |
| 3316 | { |
| 3317 | POPSTACK (case_stack); |
| 3318 | } |
| 3319 | |
| 3320 | /* Return the data type of the index-expression |
| 3321 | of the innermost case statement, or null if none. */ |
| 3322 | |
| 3323 | tree |
| 3324 | case_index_expr_type () |
| 3325 | { |
| 3326 | if (case_stack) |
| 3327 | return TREE_TYPE (case_stack->data.case_stmt.index_expr); |
| 3328 | return 0; |
| 3329 | } |
| 3330 | \f |
| 3331 | /* Accumulate one case or default label inside a case or switch statement. |
| 3332 | VALUE is the value of the case (a null pointer, for a default label). |
| 3333 | |
| 3334 | If not currently inside a case or switch statement, return 1 and do |
| 3335 | nothing. The caller will print a language-specific error message. |
| 3336 | If VALUE is a duplicate or overlaps, return 2 and do nothing |
| 3337 | except store the (first) duplicate node in *DUPLICATE. |
| 3338 | If VALUE is out of range, return 3 and do nothing. |
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
| 3340 | Return 0 on success. |
| 3341 | |
| 3342 | Extended to handle range statements. */ |
| 3343 | |
| 3344 | int |
| 3345 | pushcase (value, label, duplicate) |
| 3346 | register tree value; |
| 3347 | register tree label; |
| 3348 | tree *duplicate; |
| 3349 | { |
| 3350 | register struct case_node **l; |
| 3351 | register struct case_node *n; |
| 3352 | tree index_type; |
| 3353 | tree nominal_type; |
| 3354 | |
| 3355 | /* Fail if not inside a real case statement. */ |
| 3356 | if (! (case_stack && case_stack->data.case_stmt.start)) |
| 3357 | return 1; |
| 3358 | |
| 3359 | if (stack_block_stack |
| 3360 | && stack_block_stack->depth > case_stack->depth) |
| 3361 | return 5; |
| 3362 | |
| 3363 | index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr); |
| 3364 | nominal_type = case_stack->data.case_stmt.nominal_type; |
| 3365 | |
| 3366 | /* If the index is erroneous, avoid more problems: pretend to succeed. */ |
| 3367 | if (index_type == error_mark_node) |
| 3368 | return 0; |
| 3369 | |
| 3370 | /* Convert VALUE to the type in which the comparisons are nominally done. */ |
| 3371 | if (value != 0) |
| 3372 | value = convert (nominal_type, value); |
| 3373 | |
| 3374 | /* If this is the first label, warn if any insns have been emitted. */ |
| 3375 | if (case_stack->data.case_stmt.seenlabel == 0) |
| 3376 | { |
| 3377 | rtx insn; |
| 3378 | for (insn = case_stack->data.case_stmt.start; |
| 3379 | insn; |
| 3380 | insn = NEXT_INSN (insn)) |
| 3381 | { |
| 3382 | if (GET_CODE (insn) == CODE_LABEL) |
| 3383 | break; |
| 3384 | if (GET_CODE (insn) != NOTE |
| 3385 | && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE)) |
| 3386 | { |
| 3387 | warning ("unreachable code at beginning of %s", |
| 3388 | case_stack->data.case_stmt.printname); |
| 3389 | break; |
| 3390 | } |
| 3391 | } |
| 3392 | } |
| 3393 | case_stack->data.case_stmt.seenlabel = 1; |
| 3394 | |
| 3395 | /* Fail if this value is out of range for the actual type of the index |
| 3396 | (which may be narrower than NOMINAL_TYPE). */ |
| 3397 | if (value != 0 && ! int_fits_type_p (value, index_type)) |
| 3398 | return 3; |
| 3399 | |
| 3400 | /* Fail if this is a duplicate or overlaps another entry. */ |
| 3401 | if (value == 0) |
| 3402 | { |
| 3403 | if (case_stack->data.case_stmt.default_label != 0) |
| 3404 | { |
| 3405 | *duplicate = case_stack->data.case_stmt.default_label; |
| 3406 | return 2; |
| 3407 | } |
| 3408 | case_stack->data.case_stmt.default_label = label; |
| 3409 | } |
| 3410 | else |
| 3411 | { |
| 3412 | /* Find the elt in the chain before which to insert the new value, |
| 3413 | to keep the chain sorted in increasing order. |
| 3414 | But report an error if this element is a duplicate. */ |
| 3415 | for (l = &case_stack->data.case_stmt.case_list; |
| 3416 | /* Keep going past elements distinctly less than VALUE. */ |
| 3417 | *l != 0 && tree_int_cst_lt ((*l)->high, value); |
| 3418 | l = &(*l)->right) |
| 3419 | ; |
| 3420 | if (*l) |
| 3421 | { |
| 3422 | /* Element we will insert before must be distinctly greater; |
| 3423 | overlap means error. */ |
| 3424 | if (! tree_int_cst_lt (value, (*l)->low)) |
| 3425 | { |
| 3426 | *duplicate = (*l)->code_label; |
| 3427 | return 2; |
| 3428 | } |
| 3429 | } |
| 3430 | |
| 3431 | /* Add this label to the chain, and succeed. |
| 3432 | Copy VALUE so it is on temporary rather than momentary |
| 3433 | obstack and will thus survive till the end of the case statement. */ |
| 3434 | n = (struct case_node *) oballoc (sizeof (struct case_node)); |
| 3435 | n->left = 0; |
| 3436 | n->right = *l; |
| 3437 | n->high = n->low = copy_node (value); |
| 3438 | n->code_label = label; |
| 3439 | *l = n; |
| 3440 | } |
| 3441 | |
| 3442 | expand_label (label); |
| 3443 | return 0; |
| 3444 | } |
| 3445 | |
| 3446 | /* Like pushcase but this case applies to all values |
| 3447 | between VALUE1 and VALUE2 (inclusive). |
| 3448 | The return value is the same as that of pushcase |
| 3449 | but there is one additional error code: |
| 3450 | 4 means the specified range was empty. */ |
| 3451 | |
| 3452 | int |
| 3453 | pushcase_range (value1, value2, label, duplicate) |
| 3454 | register tree value1, value2; |
| 3455 | register tree label; |
| 3456 | tree *duplicate; |
| 3457 | { |
| 3458 | register struct case_node **l; |
| 3459 | register struct case_node *n; |
| 3460 | tree index_type; |
| 3461 | tree nominal_type; |
| 3462 | |
| 3463 | /* Fail if not inside a real case statement. */ |
| 3464 | if (! (case_stack && case_stack->data.case_stmt.start)) |
| 3465 | return 1; |
| 3466 | |
| 3467 | if (stack_block_stack |
| 3468 | && stack_block_stack->depth > case_stack->depth) |
| 3469 | return 5; |
| 3470 | |
| 3471 | index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr); |
| 3472 | nominal_type = case_stack->data.case_stmt.nominal_type; |
| 3473 | |
| 3474 | /* If the index is erroneous, avoid more problems: pretend to succeed. */ |
| 3475 | if (index_type == error_mark_node) |
| 3476 | return 0; |
| 3477 | |
| 3478 | /* If this is the first label, warn if any insns have been emitted. */ |
| 3479 | if (case_stack->data.case_stmt.seenlabel == 0) |
| 3480 | { |
| 3481 | rtx insn; |
| 3482 | for (insn = case_stack->data.case_stmt.start; |
| 3483 | insn; |
| 3484 | insn = NEXT_INSN (insn)) |
| 3485 | { |
| 3486 | if (GET_CODE (insn) == CODE_LABEL) |
| 3487 | break; |
| 3488 | if (GET_CODE (insn) != NOTE |
| 3489 | && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE)) |
| 3490 | { |
| 3491 | warning ("unreachable code at beginning of %s", |
| 3492 | case_stack->data.case_stmt.printname); |
| 3493 | break; |
| 3494 | } |
| 3495 | } |
| 3496 | } |
| 3497 | case_stack->data.case_stmt.seenlabel = 1; |
| 3498 | |
| 3499 | /* Convert VALUEs to type in which the comparisons are nominally done. */ |
  if (value1 == 0)  /* Negative infinity.  */
    value1 = TYPE_MIN_VALUE (index_type);
  value1 = convert (nominal_type, value1);

  if (value2 == 0)  /* Positive infinity.  */
    value2 = TYPE_MAX_VALUE (index_type);
  value2 = convert (nominal_type, value2);
| 3507 | |
| 3508 | /* Fail if these values are out of range. */ |
| 3509 | if (! int_fits_type_p (value1, index_type)) |
| 3510 | return 3; |
| 3511 | |
| 3512 | if (! int_fits_type_p (value2, index_type)) |
| 3513 | return 3; |
| 3514 | |
| 3515 | /* Fail if the range is empty. */ |
| 3516 | if (tree_int_cst_lt (value2, value1)) |
| 3517 | return 4; |
| 3518 | |
| 3519 | /* If the bounds are equal, turn this into the one-value case. */ |
| 3520 | if (tree_int_cst_equal (value1, value2)) |
| 3521 | return pushcase (value1, label, duplicate); |
| 3522 | |
| 3523 | /* Find the elt in the chain before which to insert the new value, |
| 3524 | to keep the chain sorted in increasing order. |
| 3525 | But report an error if this element is a duplicate. */ |
| 3526 | for (l = &case_stack->data.case_stmt.case_list; |
| 3527 | /* Keep going past elements distinctly less than this range. */ |
| 3528 | *l != 0 && tree_int_cst_lt ((*l)->high, value1); |
| 3529 | l = &(*l)->right) |
| 3530 | ; |
| 3531 | if (*l) |
| 3532 | { |
| 3533 | /* Element we will insert before must be distinctly greater; |
| 3534 | overlap means error. */ |
| 3535 | if (! tree_int_cst_lt (value2, (*l)->low)) |
| 3536 | { |
| 3537 | *duplicate = (*l)->code_label; |
| 3538 | return 2; |
| 3539 | } |
| 3540 | } |
| 3541 | |
| 3542 | /* Add this label to the chain, and succeed. |
| 3543 | Copy VALUE1, VALUE2 so they are on temporary rather than momentary |
| 3544 | obstack and will thus survive till the end of the case statement. */ |
| 3545 | |
| 3546 | n = (struct case_node *) oballoc (sizeof (struct case_node)); |
| 3547 | n->left = 0; |
| 3548 | n->right = *l; |
| 3549 | n->low = copy_node (value1); |
| 3550 | n->high = copy_node (value2); |
| 3551 | n->code_label = label; |
| 3552 | *l = n; |
| 3553 | |
| 3554 | expand_label (label); |
| 3555 | |
| 3556 | case_stack->data.case_stmt.num_ranges++; |
| 3557 | |
| 3558 | return 0; |
| 3559 | } |
| 3560 | \f |
| 3561 | /* Called when the index of a switch statement is an enumerated type |
| 3562 | and there is no default label. |
| 3563 | |
| 3564 | Checks that all enumeration literals are covered by the case |
| 3565 | expressions of a switch. Also, warn if there are any extra |
| 3566 | switch cases that are *not* elements of the enumerated type. |
| 3567 | |
| 3568 | If all enumeration literals were covered by the case expressions, |
| 3569 | turn one of the expressions into the default expression since it should |
| 3570 | not be possible to fall through such a switch. */ |
| 3571 | |
| 3572 | void |
| 3573 | check_for_full_enumeration_handling (type) |
| 3574 | tree type; |
| 3575 | { |
| 3576 | register struct case_node *n; |
| 3577 | register struct case_node **l; |
| 3578 | register tree chain; |
| 3579 | int all_values = 1; |
| 3580 | |
| 3581 | /* The time complexity of this loop is currently O(N * M), with |
| 3582 | N being the number of members in the enumerated type, and |
| 3583 | M being the number of case expressions in the switch. */ |
| 3584 | |
| 3585 | for (chain = TYPE_VALUES (type); |
| 3586 | chain; |
| 3587 | chain = TREE_CHAIN (chain)) |
| 3588 | { |
| 3589 | /* Find a match between enumeral and case expression, if possible. |
| 3590 | Quit looking when we've gone too far (since case expressions |
| 3591 | are kept sorted in ascending order). Warn about enumerators not |
| 3592 | handled in the switch statement case expression list. */ |
| 3593 | |
| 3594 | for (n = case_stack->data.case_stmt.case_list; |
| 3595 | n && tree_int_cst_lt (n->high, TREE_VALUE (chain)); |
| 3596 | n = n->right) |
| 3597 | ; |
| 3598 | |
| 3599 | if (!n || tree_int_cst_lt (TREE_VALUE (chain), n->low)) |
| 3600 | { |
| 3601 | if (warn_switch) |
| 3602 | warning ("enumeration value `%s' not handled in switch", |
| 3603 | IDENTIFIER_POINTER (TREE_PURPOSE (chain))); |
| 3604 | all_values = 0; |
| 3605 | } |
| 3606 | } |
| 3607 | |
| 3608 | /* Now we go the other way around; we warn if there are case |
| 3609 | expressions that don't correspond to enumerators. This can |
| 3610 | occur since C and C++ don't enforce type-checking of |
| 3611 | assignments to enumeration variables. */ |
| 3612 | |
| 3613 | if (warn_switch) |
| 3614 | for (n = case_stack->data.case_stmt.case_list; n; n = n->right) |
| 3615 | { |
| 3616 | for (chain = TYPE_VALUES (type); |
| 3617 | chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain)); |
| 3618 | chain = TREE_CHAIN (chain)) |
| 3619 | ; |
| 3620 | |
| 3621 | if (!chain) |
| 3622 | { |
| 3623 | if (TYPE_NAME (type) == 0) |
| 3624 | warning ("case value `%d' not in enumerated type", |
| 3625 | TREE_INT_CST_LOW (n->low)); |
| 3626 | else |
| 3627 | warning ("case value `%d' not in enumerated type `%s'", |
| 3628 | TREE_INT_CST_LOW (n->low), |
| 3629 | IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type)) |
| 3630 | == IDENTIFIER_NODE) |
| 3631 | ? TYPE_NAME (type) |
| 3632 | : DECL_NAME (TYPE_NAME (type)))); |
| 3633 | } |
| 3634 | if (!tree_int_cst_equal (n->low, n->high)) |
| 3635 | { |
| 3636 | for (chain = TYPE_VALUES (type); |
| 3637 | chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain)); |
| 3638 | chain = TREE_CHAIN (chain)) |
| 3639 | ; |
| 3640 | |
| 3641 | if (!chain) |
| 3642 | { |
| 3643 | if (TYPE_NAME (type) == 0) |
| 3644 | warning ("case value `%d' not in enumerated type", |
| 3645 | TREE_INT_CST_LOW (n->high)); |
| 3646 | else |
| 3647 | warning ("case value `%d' not in enumerated type `%s'", |
| 3648 | TREE_INT_CST_LOW (n->high), |
| 3649 | IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type)) |
| 3650 | == IDENTIFIER_NODE) |
| 3651 | ? TYPE_NAME (type) |
| 3652 | : DECL_NAME (TYPE_NAME (type)))); |
| 3653 | } |
| 3654 | } |
| 3655 | } |
| 3656 | |
| 3657 | /* If all values were found as case labels, make one of them the default |
| 3658 | label. Thus, this switch will never fall through. We arbitrarily pick |
| 3659 | the last one to make the default since this is likely the most |
| 3660 | efficient choice. */ |
| 3661 | |
| 3662 | if (all_values) |
| 3663 | { |
| 3664 | for (l = &case_stack->data.case_stmt.case_list; |
| 3665 | (*l)->right != 0; |
| 3666 | l = &(*l)->right) |
| 3667 | ; |
| 3668 | |
| 3669 | case_stack->data.case_stmt.default_label = (*l)->code_label; |
| 3670 | *l = 0; |
| 3671 | } |
| 3672 | } |
| 3673 | \f |
| 3674 | /* Terminate a case (Pascal) or switch (C) statement |
| 3675 | in which ORIG_INDEX is the expression to be tested. |
| 3676 | Generate the code to test it and jump to the right place. */ |
| 3677 | |
| 3678 | void |
| 3679 | expand_end_case (orig_index) |
| 3680 | tree orig_index; |
| 3681 | { |
| 3682 | tree minval, maxval, range; |
| 3683 | rtx default_label = 0; |
| 3684 | register struct case_node *n; |
| 3685 | int count; |
| 3686 | rtx index; |
| 3687 | rtx table_label = gen_label_rtx (); |
| 3688 | int ncases; |
| 3689 | rtx *labelvec; |
| 3690 | register int i; |
| 3691 | rtx before_case; |
| 3692 | register struct nesting *thiscase = case_stack; |
| 3693 | tree index_expr = thiscase->data.case_stmt.index_expr; |
| 3694 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr)); |
| 3695 | |
| 3696 | do_pending_stack_adjust (); |
| 3697 | |
| 3698 | /* An ERROR_MARK occurs for various reasons including invalid data type. */ |
| 3699 | if (TREE_TYPE (index_expr) != error_mark_node) |
| 3700 | { |
| 3701 | /* If switch expression was an enumerated type, check that all |
| 3702 | enumeration literals are covered by the cases. |
| 3703 | No sense trying this if there's a default case, however. */ |
| 3704 | |
| 3705 | if (!thiscase->data.case_stmt.default_label |
| 3706 | && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE |
| 3707 | && TREE_CODE (index_expr) != INTEGER_CST) |
| 3708 | check_for_full_enumeration_handling (TREE_TYPE (orig_index)); |
| 3709 | |
| 3710 | /* If this is the first label, warn if any insns have been emitted. */ |
| 3711 | if (thiscase->data.case_stmt.seenlabel == 0) |
| 3712 | { |
| 3713 | rtx insn; |
| 3714 | for (insn = get_last_insn (); |
| 3715 | insn != case_stack->data.case_stmt.start; |
| 3716 | insn = PREV_INSN (insn)) |
| 3717 | if (GET_CODE (insn) != NOTE |
	    && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
| 3719 | { |
| 3720 | warning ("unreachable code at beginning of %s", |
| 3721 | case_stack->data.case_stmt.printname); |
| 3722 | break; |
| 3723 | } |
| 3724 | } |
| 3725 | |
| 3726 | /* If we don't have a default-label, create one here, |
| 3727 | after the body of the switch. */ |
| 3728 | if (thiscase->data.case_stmt.default_label == 0) |
| 3729 | { |
| 3730 | thiscase->data.case_stmt.default_label |
| 3731 | = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); |
| 3732 | expand_label (thiscase->data.case_stmt.default_label); |
| 3733 | } |
| 3734 | default_label = label_rtx (thiscase->data.case_stmt.default_label); |
| 3735 | |
| 3736 | before_case = get_last_insn (); |
| 3737 | |
| 3738 | /* Simplify the case-list before we count it. */ |
| 3739 | group_case_nodes (thiscase->data.case_stmt.case_list); |
| 3740 | |
| 3741 | /* Get upper and lower bounds of case values. |
| 3742 | Also convert all the case values to the index expr's data type. */ |
| 3743 | |
| 3744 | count = 0; |
| 3745 | for (n = thiscase->data.case_stmt.case_list; n; n = n->right) |
| 3746 | { |
| 3747 | /* Check low and high label values are integers. */ |
| 3748 | if (TREE_CODE (n->low) != INTEGER_CST) |
| 3749 | abort (); |
| 3750 | if (TREE_CODE (n->high) != INTEGER_CST) |
| 3751 | abort (); |
| 3752 | |
| 3753 | n->low = convert (TREE_TYPE (index_expr), n->low); |
| 3754 | n->high = convert (TREE_TYPE (index_expr), n->high); |
| 3755 | |
| 3756 | /* Count the elements and track the largest and smallest |
| 3757 | of them (treating them as signed even if they are not). */ |
| 3758 | if (count++ == 0) |
| 3759 | { |
| 3760 | minval = n->low; |
| 3761 | maxval = n->high; |
| 3762 | } |
| 3763 | else |
| 3764 | { |
| 3765 | if (INT_CST_LT (n->low, minval)) |
| 3766 | minval = n->low; |
| 3767 | if (INT_CST_LT (maxval, n->high)) |
| 3768 | maxval = n->high; |
| 3769 | } |
| 3770 | /* A range counts double, since it requires two compares. */ |
| 3771 | if (! tree_int_cst_equal (n->low, n->high)) |
| 3772 | count++; |
| 3773 | } |
| 3774 | |
| 3775 | /* Compute span of values. */ |
| 3776 | if (count != 0) |
| 3777 | range = fold (build (MINUS_EXPR, TREE_TYPE (index_expr), |
| 3778 | maxval, minval)); |
| 3779 | |
| 3780 | if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK) |
| 3781 | { |
| 3782 | expand_expr (index_expr, const0_rtx, VOIDmode, 0); |
| 3783 | emit_queue (); |
| 3784 | emit_jump (default_label); |
| 3785 | } |
| 3786 | /* If range of values is much bigger than number of values, |
| 3787 | make a sequence of conditional branches instead of a dispatch. |
| 3788 | If the switch-index is a constant, do it this way |
| 3789 | because we can optimize it. */ |
| 3790 | |
| 3791 | #ifndef CASE_VALUES_THRESHOLD |
| 3792 | #ifdef HAVE_casesi |
| 3793 | #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5) |
| 3794 | #else |
| 3795 | /* If machine does not have a case insn that compares the |
| 3796 | bounds, this means extra overhead for dispatch tables |
| 3797 | which raises the threshold for using them. */ |
| 3798 | #define CASE_VALUES_THRESHOLD 5 |
| 3799 | #endif /* HAVE_casesi */ |
| 3800 | #endif /* CASE_VALUES_THRESHOLD */ |
| 3801 | |
| 3802 | else if (TREE_INT_CST_HIGH (range) != 0 |
| 3803 | || count < CASE_VALUES_THRESHOLD |
| 3804 | || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range)) |
| 3805 | > 10 * count) |
| 3806 | || TREE_CODE (index_expr) == INTEGER_CST |
| 3807 | /* These will reduce to a constant. */ |
| 3808 | || (TREE_CODE (index_expr) == CALL_EXPR |
| 3809 | && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR |
| 3810 | && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL |
| 3811 | && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE) |
| 3812 | || (TREE_CODE (index_expr) == COMPOUND_EXPR |
| 3813 | && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST)) |
| 3814 | { |
| 3815 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); |
| 3816 | |
	  /* If the index is a short or char for which we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */
| 3821 | |
| 3822 | if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT |
	  && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
| 3824 | == CODE_FOR_nothing)) |
| 3825 | { |
| 3826 | enum machine_mode wider_mode; |
| 3827 | for (wider_mode = GET_MODE (index); wider_mode != VOIDmode; |
| 3828 | wider_mode = GET_MODE_WIDER_MODE (wider_mode)) |
| 3829 | if (cmp_optab->handlers[(int) wider_mode].insn_code |
| 3830 | != CODE_FOR_nothing) |
| 3831 | { |
| 3832 | index = convert_to_mode (wider_mode, index, unsignedp); |
| 3833 | break; |
| 3834 | } |
| 3835 | } |
| 3836 | |
| 3837 | emit_queue (); |
| 3838 | do_pending_stack_adjust (); |
| 3839 | |
| 3840 | index = protect_from_queue (index, 0); |
| 3841 | if (GET_CODE (index) == MEM) |
| 3842 | index = copy_to_reg (index); |
| 3843 | if (GET_CODE (index) == CONST_INT |
| 3844 | || TREE_CODE (index_expr) == INTEGER_CST) |
| 3845 | { |
| 3846 | /* Make a tree node with the proper constant value |
| 3847 | if we don't already have one. */ |
| 3848 | if (TREE_CODE (index_expr) != INTEGER_CST) |
| 3849 | { |
| 3850 | index_expr |
| 3851 | = build_int_2 (INTVAL (index), |
| 3852 | !unsignedp && INTVAL (index) >= 0 ? 0 : -1); |
| 3853 | index_expr = convert (TREE_TYPE (index_expr), index_expr); |
| 3854 | } |
| 3855 | |
	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
| 3861 | for (n = thiscase->data.case_stmt.case_list; |
| 3862 | n; |
| 3863 | n = n->right) |
| 3864 | { |
| 3865 | if (! tree_int_cst_lt (index_expr, n->low) |
| 3866 | && ! tree_int_cst_lt (n->high, index_expr)) |
| 3867 | break; |
| 3868 | } |
| 3869 | if (n) |
| 3870 | emit_jump (label_rtx (n->code_label)); |
| 3871 | else |
| 3872 | emit_jump (default_label); |
| 3873 | } |
| 3874 | else |
| 3875 | { |
| 3876 | /* If the index expression is not constant we generate |
| 3877 | a binary decision tree to select the appropriate |
| 3878 | target code. This is done as follows: |
| 3879 | |
| 3880 | The list of cases is rearranged into a binary tree, |
| 3881 | nearly optimal assuming equal probability for each case. |
| 3882 | |
| 3883 | The tree is transformed into RTL, eliminating |
| 3884 | redundant test conditions at the same time. |
| 3885 | |
| 3886 | If program flow could reach the end of the |
| 3887 | decision tree an unconditional jump to the |
| 3888 | default code is emitted. */ |
| 3889 | |
| 3890 | use_cost_table |
| 3891 | = (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE |
| 3892 | && estimate_case_costs (thiscase->data.case_stmt.case_list)); |
| 3893 | balance_case_nodes (&thiscase->data.case_stmt.case_list, |
| 3894 | NULL_PTR); |
| 3895 | emit_case_nodes (index, thiscase->data.case_stmt.case_list, |
| 3896 | default_label, TREE_TYPE (index_expr)); |
| 3897 | emit_jump_if_reachable (default_label); |
| 3898 | } |
| 3899 | } |
| 3900 | else |
| 3901 | { |
| 3902 | int win = 0; |
| 3903 | #ifdef HAVE_casesi |
| 3904 | if (HAVE_casesi) |
| 3905 | { |
| 3906 | enum machine_mode index_mode = SImode; |
| 3907 | int index_bits = GET_MODE_BITSIZE (index_mode); |
| 3908 | |
| 3909 | /* Convert the index to SImode. */ |
| 3910 | if (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (index_expr))) |
| 3911 | > GET_MODE_BITSIZE (index_mode)) |
| 3912 | { |
| 3913 | enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr)); |
| 3914 | rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0); |
| 3915 | |
| 3916 | /* We must handle the endpoints in the original mode. */ |
| 3917 | index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr), |
| 3918 | index_expr, minval); |
| 3919 | minval = integer_zero_node; |
| 3920 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); |
| 3921 | emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 0, 0); |
| 3922 | emit_jump_insn (gen_bltu (default_label)); |
| 3923 | /* Now we can safely truncate. */ |
| 3924 | index = convert_to_mode (index_mode, index, 0); |
| 3925 | } |
| 3926 | else |
| 3927 | { |
| 3928 | if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode) |
| 3929 | index_expr = convert (type_for_size (index_bits, 0), |
| 3930 | index_expr); |
| 3931 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); |
| 3932 | } |
| 3933 | emit_queue (); |
| 3934 | index = protect_from_queue (index, 0); |
| 3935 | do_pending_stack_adjust (); |
| 3936 | |
| 3937 | emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX, |
| 3938 | VOIDmode, 0), |
| 3939 | expand_expr (range, NULL_RTX, |
| 3940 | VOIDmode, 0), |
| 3941 | table_label, default_label)); |
| 3942 | win = 1; |
| 3943 | } |
| 3944 | #endif |
| 3945 | #ifdef HAVE_tablejump |
| 3946 | if (! win && HAVE_tablejump) |
| 3947 | { |
| 3948 | index_expr = convert (thiscase->data.case_stmt.nominal_type, |
| 3949 | fold (build (MINUS_EXPR, |
| 3950 | TREE_TYPE (index_expr), |
| 3951 | index_expr, minval))); |
| 3952 | index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0); |
| 3953 | emit_queue (); |
| 3954 | index = protect_from_queue (index, 0); |
| 3955 | do_pending_stack_adjust (); |
| 3956 | |
| 3957 | do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)), |
| 3958 | expand_expr (range, NULL_RTX, VOIDmode, 0), |
| 3959 | table_label, default_label); |
| 3960 | win = 1; |
| 3961 | } |
| 3962 | #endif |
| 3963 | if (! win) |
| 3964 | abort (); |
| 3965 | |
| 3966 | /* Get table of labels to jump to, in order of case index. */ |
| 3967 | |
| 3968 | ncases = TREE_INT_CST_LOW (range) + 1; |
| 3969 | labelvec = (rtx *) alloca (ncases * sizeof (rtx)); |
| 3970 | bzero (labelvec, ncases * sizeof (rtx)); |
| 3971 | |
| 3972 | for (n = thiscase->data.case_stmt.case_list; n; n = n->right) |
| 3973 | { |
| 3974 | register HOST_WIDE_INT i |
| 3975 | = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval); |
| 3976 | |
| 3977 | while (1) |
| 3978 | { |
| 3979 | labelvec[i] |
| 3980 | = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label)); |
| 3981 | if (i + TREE_INT_CST_LOW (minval) |
| 3982 | == TREE_INT_CST_LOW (n->high)) |
| 3983 | break; |
| 3984 | i++; |
| 3985 | } |
| 3986 | } |
| 3987 | |
| 3988 | /* Fill in the gaps with the default. */ |
| 3989 | for (i = 0; i < ncases; i++) |
| 3990 | if (labelvec[i] == 0) |
| 3991 | labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label); |
| 3992 | |
      /* Output the table.  */
| 3994 | emit_label (table_label); |
| 3995 | |
| 3996 | /* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE |
| 3997 | were an expression, instead of an #ifdef/#ifndef. */ |
| 3998 | if ( |
| 3999 | #ifdef CASE_VECTOR_PC_RELATIVE |
| 4000 | 1 || |
| 4001 | #endif |
| 4002 | flag_pic) |
| 4003 | emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE, |
| 4004 | gen_rtx (LABEL_REF, Pmode, table_label), |
| 4005 | gen_rtvec_v (ncases, labelvec))); |
| 4006 | else |
| 4007 | emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE, |
| 4008 | gen_rtvec_v (ncases, labelvec))); |
| 4009 | |
| 4010 | /* If the case insn drops through the table, |
| 4011 | after the table we must jump to the default-label. |
| 4012 | Otherwise record no drop-through after the table. */ |
| 4013 | #ifdef CASE_DROPS_THROUGH |
| 4014 | emit_jump (default_label); |
| 4015 | #else |
| 4016 | emit_barrier (); |
| 4017 | #endif |
| 4018 | } |
| 4019 | |
| 4020 | before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ()); |
| 4021 | reorder_insns (before_case, get_last_insn (), |
| 4022 | thiscase->data.case_stmt.start); |
| 4023 | } |
| 4024 | if (thiscase->exit_label) |
| 4025 | emit_label (thiscase->exit_label); |
| 4026 | |
| 4027 | POPSTACK (case_stack); |
| 4028 | |
| 4029 | free_temp_slots (); |
| 4030 | } |
| 4031 | |
| 4032 | /* Generate code to jump to LABEL if OP1 and OP2 are equal. */ |
| 4033 | |
| 4034 | static void |
| 4035 | do_jump_if_equal (op1, op2, label, unsignedp) |
| 4036 | rtx op1, op2, label; |
| 4037 | int unsignedp; |
| 4038 | { |
| 4039 | if (GET_CODE (op1) == CONST_INT |
| 4040 | && GET_CODE (op2) == CONST_INT) |
| 4041 | { |
| 4042 | if (INTVAL (op1) == INTVAL (op2)) |
| 4043 | emit_jump (label); |
| 4044 | } |
| 4045 | else |
| 4046 | { |
| 4047 | enum machine_mode mode = GET_MODE (op1); |
| 4048 | if (mode == VOIDmode) |
| 4049 | mode = GET_MODE (op2); |
| 4050 | emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0); |
| 4051 | emit_jump_insn (gen_beq (label)); |
| 4052 | } |
| 4053 | } |
| 4054 | \f |
| 4055 | /* Not all case values are encountered equally. This function |
| 4056 | uses a heuristic to weight case labels, in cases where that |
| 4057 | looks like a reasonable thing to do. |
| 4058 | |
| 4059 | Right now, all we try to guess is text, and we establish the |
| 4060 | following weights: |
| 4061 | |
| 4062 | chars above space: 16 |
| 4063 | digits: 16 |
| 4064 | default: 12 |
| 4065 | space, punct: 8 |
| 4066 | tab: 4 |
| 4067 | newline: 2 |
| 4068 | other "\" chars: 1 |
| 4069 | remaining chars: 0 |
| 4070 | |
| 4071 | If we find any cases in the switch that are not either -1 or in the range |
| 4072 | of valid ASCII characters, or are control characters other than those |
   commonly used with "\", don't treat this switch as scanning text.
| 4074 | |
| 4075 | Return 1 if these nodes are suitable for cost estimation, otherwise |
| 4076 | return 0. */ |
| 4077 | |
| 4078 | static int |
| 4079 | estimate_case_costs (node) |
| 4080 | case_node_ptr node; |
| 4081 | { |
| 4082 | tree min_ascii = build_int_2 (-1, -1); |
| 4083 | tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0)); |
| 4084 | case_node_ptr n; |
| 4085 | int i; |
| 4086 | |
| 4087 | /* If we haven't already made the cost table, make it now. Note that the |
| 4088 | lower bound of the table is -1, not zero. */ |
| 4089 | |
| 4090 | if (cost_table == NULL) |
| 4091 | { |
| 4092 | cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1; |
| 4093 | bzero (cost_table - 1, 129 * sizeof (short)); |
| 4094 | |
| 4095 | for (i = 0; i < 128; i++) |
| 4096 | { |
| 4097 | if (isalnum (i)) |
| 4098 | cost_table[i] = 16; |
| 4099 | else if (ispunct (i)) |
| 4100 | cost_table[i] = 8; |
| 4101 | else if (iscntrl (i)) |
| 4102 | cost_table[i] = -1; |
| 4103 | } |
| 4104 | |
| 4105 | cost_table[' '] = 8; |
| 4106 | cost_table['\t'] = 4; |
| 4107 | cost_table['\0'] = 4; |
| 4108 | cost_table['\n'] = 2; |
| 4109 | cost_table['\f'] = 1; |
| 4110 | cost_table['\v'] = 1; |
| 4111 | cost_table['\b'] = 1; |
| 4112 | } |
| 4113 | |
  /* See if all the case expressions look like text.  It is text if each
     low constant is >= -1 and each high constant is <= 127.  Do all comparisons
| 4116 | as signed arithmetic since we don't want to ever access cost_table with a |
| 4117 | value less than -1. Also check that none of the constants in a range |
| 4118 | are strange control characters. */ |
| 4119 | |
| 4120 | for (n = node; n; n = n->right) |
| 4121 | { |
| 4122 | if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high)) |
| 4123 | return 0; |
| 4124 | |
| 4125 | for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++) |
| 4126 | if (cost_table[i] < 0) |
| 4127 | return 0; |
| 4128 | } |
| 4129 | |
| 4130 | /* All interesting values are within the range of interesting |
| 4131 | ASCII characters. */ |
| 4132 | return 1; |
| 4133 | } |
| 4134 | |
| 4135 | /* Scan an ordered list of case nodes |
| 4136 | combining those with consecutive values or ranges. |
| 4137 | |
   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
| 4139 | |
| 4140 | static void |
| 4141 | group_case_nodes (head) |
| 4142 | case_node_ptr head; |
| 4143 | { |
| 4144 | case_node_ptr node = head; |
| 4145 | |
| 4146 | while (node) |
| 4147 | { |
| 4148 | rtx lb = next_real_insn (label_rtx (node->code_label)); |
| 4149 | case_node_ptr np = node; |
| 4150 | |
| 4151 | /* Try to group the successors of NODE with NODE. */ |
| 4152 | while (((np = np->right) != 0) |
| 4153 | /* Do they jump to the same place? */ |
| 4154 | && next_real_insn (label_rtx (np->code_label)) == lb |
| 4155 | /* Are their ranges consecutive? */ |
| 4156 | && tree_int_cst_equal (np->low, |
| 4157 | fold (build (PLUS_EXPR, |
| 4158 | TREE_TYPE (node->high), |
| 4159 | node->high, |
| 4160 | integer_one_node))) |
| 4161 | /* An overflow is not consecutive. */ |
| 4162 | && tree_int_cst_lt (node->high, |
| 4163 | fold (build (PLUS_EXPR, |
| 4164 | TREE_TYPE (node->high), |
| 4165 | node->high, |
| 4166 | integer_one_node)))) |
| 4167 | { |
| 4168 | node->high = np->high; |
| 4169 | } |
| 4170 | /* NP is the first node after NODE which can't be grouped with it. |
| 4171 | Delete the nodes in between, and move on to that node. */ |
| 4172 | node->right = np; |
| 4173 | node = np; |
| 4174 | } |
| 4175 | } |
| 4176 | |
| 4177 | /* Take an ordered list of case nodes |
| 4178 | and transform them into a near optimal binary tree, |
| 4179 | on the assumption that any target code selection value is as |
| 4180 | likely as any other. |
| 4181 | |
| 4182 | The transformation is performed by splitting the ordered |
| 4183 | list into two equal sections plus a pivot. The parts are |
| 4184 | then attached to the pivot as left and right branches. Each |
   branch is then transformed recursively.  */
| 4186 | |
| 4187 | static void |
| 4188 | balance_case_nodes (head, parent) |
| 4189 | case_node_ptr *head; |
| 4190 | case_node_ptr parent; |
| 4191 | { |
| 4192 | register case_node_ptr np; |
| 4193 | |
| 4194 | np = *head; |
| 4195 | if (np) |
| 4196 | { |
| 4197 | int cost = 0; |
| 4198 | int i = 0; |
| 4199 | int ranges = 0; |
| 4200 | register case_node_ptr *npp; |
| 4201 | case_node_ptr left; |
| 4202 | |
| 4203 | /* Count the number of entries on branch. Also count the ranges. */ |
| 4204 | |
| 4205 | while (np) |
| 4206 | { |
| 4207 | if (!tree_int_cst_equal (np->low, np->high)) |
| 4208 | { |
| 4209 | ranges++; |
| 4210 | if (use_cost_table) |
| 4211 | cost += cost_table[TREE_INT_CST_LOW (np->high)]; |
| 4212 | } |
| 4213 | |
| 4214 | if (use_cost_table) |
| 4215 | cost += cost_table[TREE_INT_CST_LOW (np->low)]; |
| 4216 | |
| 4217 | i++; |
| 4218 | np = np->right; |
| 4219 | } |
| 4220 | |
| 4221 | if (i > 2) |
| 4222 | { |
| 4223 | /* Split this list if it is long enough for that to help. */ |
| 4224 | npp = head; |
| 4225 | left = *npp; |
| 4226 | if (use_cost_table) |
| 4227 | { |
	      /* Find the place in the list that bisects the list's total cost.
		 Here I is set to half the total cost.  */
| 4230 | int n_moved = 0; |
| 4231 | i = (cost + 1) / 2; |
| 4232 | while (1) |
| 4233 | { |
| 4234 | /* Skip nodes while their cost does not reach that amount. */ |
| 4235 | if (!tree_int_cst_equal ((*npp)->low, (*npp)->high)) |
| 4236 | i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)]; |
| 4237 | i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)]; |
| 4238 | if (i <= 0) |
| 4239 | break; |
| 4240 | npp = &(*npp)->right; |
| 4241 | n_moved += 1; |
| 4242 | } |
| 4243 | if (n_moved == 0) |
| 4244 | { |
| 4245 | /* Leave this branch lopsided, but optimize left-hand |
| 4246 | side and fill in `parent' fields for right-hand side. */ |
| 4247 | np = *head; |
| 4248 | np->parent = parent; |
| 4249 | balance_case_nodes (&np->left, np); |
| 4250 | for (; np->right; np = np->right) |
| 4251 | np->right->parent = np; |
| 4252 | return; |
| 4253 | } |
| 4254 | } |
| 4255 | /* If there are just three nodes, split at the middle one. */ |
| 4256 | else if (i == 3) |
| 4257 | npp = &(*npp)->right; |
| 4258 | else |
| 4259 | { |
	      /* Find the place in the list that bisects the list's total
		 cost, where ranges count as 2.
		 Here I is set to half the total cost.  */
| 4263 | i = (i + ranges + 1) / 2; |
| 4264 | while (1) |
| 4265 | { |
| 4266 | /* Skip nodes while their cost does not reach that amount. */ |
| 4267 | if (!tree_int_cst_equal ((*npp)->low, (*npp)->high)) |
| 4268 | i--; |
| 4269 | i--; |
| 4270 | if (i <= 0) |
| 4271 | break; |
| 4272 | npp = &(*npp)->right; |
| 4273 | } |
| 4274 | } |
| 4275 | *head = np = *npp; |
| 4276 | *npp = 0; |
| 4277 | np->parent = parent; |
| 4278 | np->left = left; |
| 4279 | |
| 4280 | /* Optimize each of the two split parts. */ |
| 4281 | balance_case_nodes (&np->left, np); |
| 4282 | balance_case_nodes (&np->right, np); |
| 4283 | } |
| 4284 | else |
| 4285 | { |
| 4286 | /* Else leave this branch as one level, |
| 4287 | but fill in `parent' fields. */ |
| 4288 | np = *head; |
| 4289 | np->parent = parent; |
| 4290 | for (; np->right; np = np->right) |
| 4291 | np->right->parent = np; |
| 4292 | } |
| 4293 | } |
| 4294 | } |
| 4295 | \f |
| 4296 | /* Search the parent sections of the case node tree |
| 4297 | to see if a test for the lower bound of NODE would be redundant. |
| 4298 | INDEX_TYPE is the type of the index expression. |
| 4299 | |
| 4300 | The instructions to generate the case decision tree are |
| 4301 | output in the same order as nodes are processed so it is |
| 4302 | known that if a parent node checks the range of the current |
| 4303 | node minus one that the current node is bounded at its lower |
| 4304 | span. Thus the test would be redundant. */ |
| 4305 | |
| 4306 | static int |
| 4307 | node_has_low_bound (node, index_type) |
| 4308 | case_node_ptr node; |
| 4309 | tree index_type; |
| 4310 | { |
| 4311 | tree low_minus_one; |
| 4312 | case_node_ptr pnode; |
| 4313 | |
| 4314 | /* If the lower bound of this node is the lowest value in the index type, |
| 4315 | we need not test it. */ |
| 4316 | |
| 4317 | if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type))) |
| 4318 | return 1; |
| 4319 | |
| 4320 | /* If this node has a left branch, the value at the left must be less |
| 4321 | than that at this node, so it cannot be bounded at the bottom and |
| 4322 | we need not bother testing any further. */ |
| 4323 | |
| 4324 | if (node->left) |
| 4325 | return 0; |
| 4326 | |
| 4327 | low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low), |
| 4328 | node->low, integer_one_node)); |
| 4329 | |
| 4330 | /* If the subtraction above overflowed, we can't verify anything. |
| 4331 | Otherwise, look for a parent that tests our value - 1. */ |
| 4332 | |
| 4333 | if (! tree_int_cst_lt (low_minus_one, node->low)) |
| 4334 | return 0; |
| 4335 | |
| 4336 | for (pnode = node->parent; pnode; pnode = pnode->parent) |
| 4337 | if (tree_int_cst_equal (low_minus_one, pnode->high)) |
| 4338 | return 1; |
| 4339 | |
| 4340 | return 0; |
| 4341 | } |
| 4342 | |
| 4343 | /* Search the parent sections of the case node tree |
| 4344 | to see if a test for the upper bound of NODE would be redundant. |
| 4345 | INDEX_TYPE is the type of the index expression. |
| 4346 | |
| 4347 | The instructions to generate the case decision tree are |
| 4348 | output in the same order as nodes are processed so it is |
| 4349 | known that if a parent node checks the range of the current |
| 4350 | node plus one that the current node is bounded at its upper |
| 4351 | span. Thus the test would be redundant. */ |
| 4352 | |
| 4353 | static int |
| 4354 | node_has_high_bound (node, index_type) |
| 4355 | case_node_ptr node; |
| 4356 | tree index_type; |
| 4357 | { |
| 4358 | tree high_plus_one; |
| 4359 | case_node_ptr pnode; |
| 4360 | |
| 4361 | /* If the upper bound of this node is the highest value in the type |
| 4362 | of the index expression, we need not test against it. */ |
| 4363 | |
| 4364 | if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type))) |
| 4365 | return 1; |
| 4366 | |
| 4367 | /* If this node has a right branch, the value at the right must be greater |
| 4368 | than that at this node, so it cannot be bounded at the top and |
| 4369 | we need not bother testing any further. */ |
| 4370 | |
| 4371 | if (node->right) |
| 4372 | return 0; |
| 4373 | |
| 4374 | high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high), |
| 4375 | node->high, integer_one_node)); |
| 4376 | |
| 4377 | /* If the addition above overflowed, we can't verify anything. |
| 4378 | Otherwise, look for a parent that tests our value + 1. */ |
| 4379 | |
| 4380 | if (! tree_int_cst_lt (node->high, high_plus_one)) |
| 4381 | return 0; |
| 4382 | |
| 4383 | for (pnode = node->parent; pnode; pnode = pnode->parent) |
| 4384 | if (tree_int_cst_equal (high_plus_one, pnode->low)) |
| 4385 | return 1; |
| 4386 | |
| 4387 | return 0; |
| 4388 | } |
| 4389 | |
| 4390 | /* Search the parent sections of the |
| 4391 | case node tree to see if both tests for the upper and lower |
| 4392 | bounds of NODE would be redundant. */ |
| 4393 | |
| 4394 | static int |
| 4395 | node_is_bounded (node, index_type) |
| 4396 | case_node_ptr node; |
| 4397 | tree index_type; |
| 4398 | { |
| 4399 | return (node_has_low_bound (node, index_type) |
| 4400 | && node_has_high_bound (node, index_type)); |
| 4401 | } |
| 4402 | |
| 4403 | /* Emit an unconditional jump to LABEL unless it would be dead code. */ |
| 4404 | |
| 4405 | static void |
| 4406 | emit_jump_if_reachable (label) |
| 4407 | rtx label; |
| 4408 | { |
| 4409 | if (GET_CODE (get_last_insn ()) != BARRIER) |
| 4410 | emit_jump (label); |
| 4411 | } |
| 4412 | \f |
| 4413 | /* Emit step-by-step code to select a case for the value of INDEX. |
   The decision tree thus generated follows the form of the
| 4415 | case-node binary tree NODE, whose nodes represent test conditions. |
| 4416 | INDEX_TYPE is the type of the index of the switch. |
| 4417 | |
| 4418 | Care is taken to prune redundant tests from the decision tree |
| 4419 | by detecting any boundary conditions already checked by |
| 4420 | emitted rtx. (See node_has_high_bound, node_has_low_bound |
| 4421 | and node_is_bounded, above.) |
| 4422 | |
   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinates' associated
   code for out-of-bound conditions on the current node.
| 4430 | |
| 4431 | We can assume that when control reaches the code generated here, |
| 4432 | the index value has already been compared with the parents |
| 4433 | of this node, and determined to be on the same side of each parent |
| 4434 | as this node is. Thus, if this node tests for the value 51, |
| 4435 | and a parent tested for 52, we don't need to consider |
| 4436 | the possibility of a value greater than 51. If another parent |
| 4437 | tests for the value 50, then this node need not test anything. */ |
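
/* As a concrete sketch (a hypothetical example with made-up label
   names, not taken from the sources): for single-valued cases 10, 20
   and 30 balanced with 20 at the root, and neither subtree bounded,
   the tests emitted here correspond to

	if (index == 20) goto L20;
	if (index > 20) goto test_right;
	if (index == 10) goto L10;
	goto default_label;
     test_right:
	if (index == 30) goto L30;

   where the fall-through after the last test reaches the jump to the
   default label emitted by our caller.  */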
| 4438 | |
| 4439 | static void |
| 4440 | emit_case_nodes (index, node, default_label, index_type) |
| 4441 | rtx index; |
| 4442 | case_node_ptr node; |
| 4443 | rtx default_label; |
| 4444 | tree index_type; |
| 4445 | { |
| 4446 | /* If INDEX has an unsigned type, we must make unsigned branches. */ |
| 4447 | int unsignedp = TREE_UNSIGNED (index_type); |
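  /* Select the signed or unsigned variant of each branch generator
     once here, so the code below need not test `unsignedp' at each
     emission site.  */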
| 4448 | typedef rtx rtx_function (); |
| 4449 | rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt; |
| 4450 | rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge; |
| 4451 | rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt; |
| 4452 | rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble; |
| 4453 | enum machine_mode mode = GET_MODE (index); |
| 4454 | |
| 4455 | /* See if our parents have already tested everything for us. |
| 4456 | If they have, emit an unconditional jump for this node. */ |
| 4457 | if (node_is_bounded (node, index_type)) |
| 4458 | emit_jump (label_rtx (node->code_label)); |
| 4459 | |
| 4460 | else if (tree_int_cst_equal (node->low, node->high)) |
| 4461 | { |
| 4462 | /* Node is single valued. First see if the index expression matches |
| 4463 | this node and then check our children, if any. */ |
| 4464 | |
| 4465 | do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0), |
| 4466 | label_rtx (node->code_label), unsignedp); |
| 4467 | |
| 4468 | if (node->right != 0 && node->left != 0) |
| 4469 | { |
| 4470 | /* This node has children on both sides. |
| 4471 | Dispatch to one side or the other |
| 4472 | by comparing the index value with this node's value. |
| 4473 | If one subtree is bounded, check that one first, |
| 4474 | so we can avoid real branches in the tree. */ |
| 4475 | |
| 4476 | if (node_is_bounded (node->right, index_type)) |
| 4477 | { |
| 4478 | emit_cmp_insn (index, expand_expr (node->high, NULL_RTX, |
| 4479 | VOIDmode, 0), |
| 4480 | GT, NULL_RTX, mode, unsignedp, 0); |
| 4481 | |
| 4482 | emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label))); |
| 4483 | emit_case_nodes (index, node->left, default_label, index_type); |
| 4484 | } |
| 4485 | |
| 4486 | else if (node_is_bounded (node->left, index_type)) |
| 4487 | { |
| 4488 | emit_cmp_insn (index, expand_expr (node->high, NULL_RTX, |
| 4489 | VOIDmode, 0), |
| 4490 | LT, NULL_RTX, mode, unsignedp, 0); |
| 4491 | emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label))); |
| 4492 | emit_case_nodes (index, node->right, default_label, index_type); |
| 4493 | } |
| 4494 | |
| 4495 | else |
| 4496 | { |
| 4497 | /* Neither node is bounded. First distinguish the two sides; |
| 4498 | then emit the code for one side at a time. */ |
| 4499 | |
| 4500 | tree test_label |
| 4501 | = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); |
| 4502 | |
| 4503 | /* See if the value is on the right. */ |
| 4504 | emit_cmp_insn (index, expand_expr (node->high, NULL_RTX, |
| 4505 | VOIDmode, 0), |
| 4506 | GT, NULL_RTX, mode, unsignedp, 0); |
| 4507 | emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label))); |
| 4508 | |
| 4509 | /* Value must be on the left. |
| 4510 | Handle the left-hand subtree. */ |
| 4511 | emit_case_nodes (index, node->left, default_label, index_type); |
| 4512 | /* If left-hand subtree does nothing, |
| 4513 | go to default. */ |
| 4514 | emit_jump_if_reachable (default_label); |
| 4515 | |
| 4516 | /* Code branches here for the right-hand subtree. */ |
| 4517 | expand_label (test_label); |
| 4518 | emit_case_nodes (index, node->right, default_label, index_type); |
| 4519 | } |
| 4520 | } |
| 4521 | |
| 4522 | else if (node->right != 0 && node->left == 0) |
| 4523 | { |
      /* Here we have a right child but no left, so we issue a conditional
	 branch to default and process the right child.

	 Omit the conditional branch to default if it would avoid only one
	 right child; it costs too much space to save so little time.  */
| 4529 | |
| 4530 | if (node->right->right || node->right->left |
| 4531 | || !tree_int_cst_equal (node->right->low, node->right->high)) |
| 4532 | { |
| 4533 | if (!node_has_low_bound (node, index_type)) |
| 4534 | { |
| 4535 | emit_cmp_insn (index, expand_expr (node->high, NULL_RTX, |
| 4536 | VOIDmode, 0), |
| 4537 | LT, NULL_RTX, mode, unsignedp, 0); |
| 4538 | emit_jump_insn ((*gen_blt_pat) (default_label)); |
| 4539 | } |
| 4540 | |
| 4541 | emit_case_nodes (index, node->right, default_label, index_type); |
| 4542 | } |
| 4543 | else |
| 4544 | /* We cannot process node->right normally |
| 4545 | since we haven't ruled out the numbers less than |
| 4546 | this node's value. So handle node->right explicitly. */ |
| 4547 | do_jump_if_equal (index, |
| 4548 | expand_expr (node->right->low, NULL_RTX, |
| 4549 | VOIDmode, 0), |
| 4550 | label_rtx (node->right->code_label), unsignedp); |
| 4551 | } |
| 4552 | |
| 4553 | else if (node->right == 0 && node->left != 0) |
| 4554 | { |
| 4555 | /* Just one subtree, on the left. */ |
| 4556 | |
| 4557 | #if 0 /* The following code and comment were formerly part |
| 4558 | of the condition here, but they didn't work |
| 4559 | and I don't understand what the idea was. -- rms. */ |
| 4560 | /* If our "most probable entry" is less probable |
| 4561 | than the default label, emit a jump to |
| 4562 | the default label using condition codes |
| 4563 | already lying around. With no right branch, |
| 4564 | a branch-greater-than will get us to the default |
| 4565 | label correctly. */ |
| 4566 | if (use_cost_table |
| 4567 | && cost_table[TREE_INT_CST_LOW (node->high)] < 12) |
| 4568 | ; |
| 4569 | #endif /* 0 */ |
| 4570 | if (node->left->left || node->left->right |
| 4571 | || !tree_int_cst_equal (node->left->low, node->left->high)) |
| 4572 | { |
| 4573 | if (!node_has_high_bound (node, index_type)) |
| 4574 | { |
| 4575 | emit_cmp_insn (index, expand_expr (node->high, NULL_RTX, |
| 4576 | VOIDmode, 0), |
| 4577 | GT, NULL_RTX, mode, unsignedp, 0); |
| 4578 | emit_jump_insn ((*gen_bgt_pat) (default_label)); |
| 4579 | } |
| 4580 | |
| 4581 | emit_case_nodes (index, node->left, default_label, index_type); |
| 4582 | } |
| 4583 | else |
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
| 4587 | do_jump_if_equal (index, |
| 4588 | expand_expr (node->left->low, NULL_RTX, |
| 4589 | VOIDmode, 0), |
| 4590 | label_rtx (node->left->code_label), unsignedp); |
| 4591 | } |
| 4592 | } |
| 4593 | else |
| 4594 | { |
| 4595 | /* Node is a range. These cases are very similar to those for a single |
| 4596 | value, except that we do not start by testing whether this node |
| 4597 | is the one to branch to. */ |
| 4598 | |
| 4599 | if (node->right != 0 && node->left != 0) |
| 4600 | { |
| 4601 | /* Node has subtrees on both sides. |
| 4602 | If the right-hand subtree is bounded, |
| 4603 | test for it first, since we can go straight there. |
| 4604 | Otherwise, we need to make a branch in the control structure, |
| 4605 | then handle the two subtrees. */ |
| 4606 | tree test_label = 0; |
| 4607 | |
| 4608 | emit_cmp_insn (index, expand_expr (node->high, NULL_RTX, |
| 4609 | VOIDmode, 0), |
| 4610 | GT, NULL_RTX, mode, unsignedp, 0); |
| 4611 | |
| 4612 | if (node_is_bounded (node->right, index_type)) |
| 4613 | /* Right hand node is fully bounded so we can eliminate any |
| 4614 | testing and branch directly to the target code. */ |
| 4615 | emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label))); |
| 4616 | else |
| 4617 | { |
| 4618 | /* Right hand node requires testing. |
| 4619 | Branch to a label where we will handle it later. */ |
| 4620 | |
| 4621 | test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); |
| 4622 | emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label))); |
| 4623 | } |
| 4624 | |
| 4625 | /* Value belongs to this node or to the left-hand subtree. */ |
| 4626 | |
| 4627 | emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0), |
| 4628 | GE, NULL_RTX, mode, unsignedp, 0); |
| 4629 | emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label))); |
| 4630 | |
| 4631 | /* Handle the left-hand subtree. */ |
| 4632 | emit_case_nodes (index, node->left, default_label, index_type); |
| 4633 | |
| 4634 | /* If right node had to be handled later, do that now. */ |
| 4635 | |
| 4636 | if (test_label) |
| 4637 | { |
| 4638 | /* If the left-hand subtree fell through, |
| 4639 | don't let it fall into the right-hand subtree. */ |
| 4640 | emit_jump_if_reachable (default_label); |
| 4641 | |
| 4642 | expand_label (test_label); |
| 4643 | emit_case_nodes (index, node->right, default_label, index_type); |
| 4644 | } |
| 4645 | } |
| 4646 | |
| 4647 | else if (node->right != 0 && node->left == 0) |
| 4648 | { |
| 4649 | /* Deal with values to the left of this node, |
| 4650 | if they are possible. */ |
| 4651 | if (!node_has_low_bound (node, index_type)) |
| 4652 | { |
| 4653 | emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, |
| 4654 | VOIDmode, 0), |
| 4655 | LT, NULL_RTX, mode, unsignedp, 0); |
| 4656 | emit_jump_insn ((*gen_blt_pat) (default_label)); |
| 4657 | } |
| 4658 | |
| 4659 | /* Value belongs to this node or to the right-hand subtree. */ |
| 4660 | |
| 4661 | emit_cmp_insn (index, expand_expr (node->high, NULL_RTX, |
| 4662 | VOIDmode, 0), |
| 4663 | LE, NULL_RTX, mode, unsignedp, 0); |
| 4664 | emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label))); |
| 4665 | |
| 4666 | emit_case_nodes (index, node->right, default_label, index_type); |
| 4667 | } |
| 4668 | |
| 4669 | else if (node->right == 0 && node->left != 0) |
| 4670 | { |
| 4671 | /* Deal with values to the right of this node, |
| 4672 | if they are possible. */ |
| 4673 | if (!node_has_high_bound (node, index_type)) |
| 4674 | { |
| 4675 | emit_cmp_insn (index, expand_expr (node->high, NULL_RTX, |
| 4676 | VOIDmode, 0), |
| 4677 | GT, NULL_RTX, mode, unsignedp, 0); |
| 4678 | emit_jump_insn ((*gen_bgt_pat) (default_label)); |
| 4679 | } |
| 4680 | |
| 4681 | /* Value belongs to this node or to the left-hand subtree. */ |
| 4682 | |
| 4683 | emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0), |
| 4684 | GE, NULL_RTX, mode, unsignedp, 0); |
| 4685 | emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label))); |
| 4686 | |
| 4687 | emit_case_nodes (index, node->left, default_label, index_type); |
| 4688 | } |
| 4689 | |
| 4690 | else |
| 4691 | { |
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  At most one of the bounds can be known to hold,
	     since otherwise this node would be fully bounded--a case tested
	     already.  */
| 4695 | |
| 4696 | if (!node_has_high_bound (node, index_type)) |
| 4697 | { |
| 4698 | emit_cmp_insn (index, expand_expr (node->high, NULL_RTX, |
| 4699 | VOIDmode, 0), |
| 4700 | GT, NULL_RTX, mode, unsignedp, 0); |
| 4701 | emit_jump_insn ((*gen_bgt_pat) (default_label)); |
| 4702 | } |
| 4703 | |
| 4704 | if (!node_has_low_bound (node, index_type)) |
| 4705 | { |
| 4706 | emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, |
| 4707 | VOIDmode, 0), |
| 4708 | LT, NULL_RTX, mode, unsignedp, 0); |
| 4709 | emit_jump_insn ((*gen_blt_pat) (default_label)); |
| 4710 | } |
| 4711 | |
| 4712 | emit_jump (label_rtx (node->code_label)); |
| 4713 | } |
| 4714 | } |
| 4715 | } |
| 4716 | \f |
| 4717 | /* These routines are used by the loop unrolling code. They copy BLOCK trees |
| 4718 | so that the debugging info will be correct for the unrolled loop. */ |
| 4719 | |
| 4720 | /* Indexed by block number, contains a pointer to the N'th block node. */ |
| 4721 | |
| 4722 | static tree *block_vector; |
| 4723 | |
| 4724 | void |
| 4725 | find_loop_tree_blocks () |
| 4726 | { |
| 4727 | tree block = DECL_INITIAL (current_function_decl); |
| 4728 | |
  /* The first block is for the function body, and does not have
     corresponding block notes.  Don't include it in the block vector.  */
| 4731 | block = BLOCK_SUBBLOCKS (block); |
| 4732 | |
| 4733 | block_vector = identify_blocks (block, get_insns ()); |
| 4734 | } |
| 4735 | |
| 4736 | void |
| 4737 | unroll_block_trees () |
| 4738 | { |
| 4739 | tree block = DECL_INITIAL (current_function_decl); |
| 4740 | |
| 4741 | reorder_blocks (block_vector, block, get_insns ()); |
| 4742 | } |
| 4743 | |