/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 1, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.

   `expand_function_start' is called at the beginning of a function,
   before the function body is parsed, and `expand_function_end' is
   called after parsing the body.

   Call `assign_stack_local' to allocate a stack slot for a local variable.
   This is usually done during the RTL generation for the function body,
   but it can also be done in the reload pass when a pseudo-register does
   not get a hard register.

   Call `put_var_into_stack' when you learn, belatedly, that a variable
   previously given a pseudo-register must in fact go in the stack.
   This function changes the DECL_RTL to be a stack slot instead of a reg
   then scans all the RTL instructions so far generated to correct them.  */

#include "config.h"

#include <stdio.h>

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "recog.h"

#define MAX(x,y) (((x) > (y)) ? (x) : (y))
#define MIN(x,y) (((x) < (y)) ? (x) : (y))

/* Nonzero if function being compiled pops its args on return.
   May affect compilation of return insn or of function epilogue.  */

int current_function_pops_args;

/* Nonzero if function being compiled needs to be given an address
   where the value should be stored.  */

int current_function_returns_struct;

/* Nonzero if function being compiled needs to
   return the address of where it has put a structure value.  */

int current_function_returns_pcc_struct;

/* Nonzero if function being compiled needs to be passed a static chain.  */

int current_function_needs_context;

/* Nonzero if function being compiled can call setjmp.  */

int current_function_calls_setjmp;

/* Nonzero if function being compiled can call alloca,
   either as a subroutine or builtin.  */

int current_function_calls_alloca;

/* Nonzero if the current function returns a pointer type.  */

int current_function_returns_pointer;

/* If function's args have a fixed size, this is that size, in bytes.
   Otherwise, it is -1.
   May affect compilation of return insn or of function epilogue.  */

int current_function_args_size;

/* # bytes the prologue should push and pretend that the caller pushed them.
   The prologue must do this, but only if parms can be passed in registers.  */

int current_function_pretend_args_size;

/* This is the offset from the arg pointer to the place where the first
   anonymous arg can be found, if there is one.  */
rtx current_function_arg_offset_rtx;

/* Name of function now being compiled.  */

char *current_function_name;

/* Label that will go on parm cleanup code, if any.
   Jumping to this label runs cleanup code for parameters, if
   such code must be run.  Following this code is the logical return label.  */

rtx cleanup_label;

/* Label that will go on function epilogue.
   Jumping to this label serves as a "return" instruction
   on machines which require execution of the epilogue on all returns.  */

rtx return_label;

/* List (chain of EXPR_LISTs) of pseudo-regs of SAVE_EXPRs.
   So we can mark them all live at the end of the function, if nonopt.  */
rtx save_expr_regs;

/* List (chain of EXPR_LISTs) of all stack slots in this function.
   Made for the sake of unshare_all_rtl.  */
rtx stack_slot_list;

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Insn after which register parms and SAVE_EXPRs are born, if nonopt.  */
static rtx parm_birth_insn;

/* The FUNCTION_DECL node for the function being compiled.  */

static tree this_function;

/* Number of binding contours started so far in this function.  */

static int block_start_count;

/* Offset to end of allocated area of stack frame.
   If stack grows down, this is the address of the last stack slot allocated.
   If stack grows up, this is the address for the next slot.  */
static int frame_offset;

/* Nonzero if a stack slot has been generated whose address is not
   actually valid.  It means that the generated rtl must all be scanned
   to detect and correct the invalid addresses where they occur.  */
static int invalid_stack_slot;

/* Label to jump back to for tail recursion, or 0 if we have
   not yet needed one for this function.  */
static rtx tail_recursion_label;

/* Place after which to insert the tail_recursion_label if we need one.  */
static rtx tail_recursion_reentry;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Chain of all RTL_EXPRs that have insns in them.  */
static tree rtl_expr_chain;

/* Last insn of those whose job was to put parms into their nominal homes.  */
static rtx last_parm_insn;
\f
/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.  */

struct case_node
{
  struct case_node *left;
  struct case_node *right;
  struct case_node *parent;
  tree low;
  tree high;
  tree test_label;
  tree code_label;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
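
/* For example (illustrative): a Pascal-style range label `1..5:' would be
   recorded as one case_node with low = 1 and high = 5, while a C
   `case 7:' gets low == high == 7.  */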
202 | ||
203 | static void balance_case_nodes (); | |
204 | static void emit_case_nodes (); | |
205 | static void group_case_nodes (); | |
206 | static void emit_jump_if_reachable (); | |
207 | \f | |
208 | /* Stack of control and binding constructs we are currently inside. | |
209 | ||
210 | These constructs begin when you call `expand_start_WHATEVER' | |
211 | and end when you call `expand_end_WHATEVER'. This stack records | |
212 | info about how the construct began that tells the end-function | |
213 | what to do. It also may provide information about the construct | |
214 | to alter the behavior of other constructs within the body. | |
215 | For example, they may affect the behavior of C `break' and `continue'. | |
216 | ||
217 | Each construct gets one `struct nesting' object. | |
218 | All of these objects are chained through the `all' field. | |
219 | `nesting_stack' points to the first object (innermost construct). | |
220 | The position of an entry on `nesting_stack' is in its `depth' field. | |
221 | ||
222 | Each type of construct has its own individual stack. | |
223 | For example, loops have `loop_stack'. Each object points to the | |
224 | next object of the same type through the `next' field. | |
225 | ||
226 | Some constructs are visible to `break' exit-statements and others | |
227 | are not. Which constructs are visible depends on the language. | |
228 | Therefore, the data structure allows each construct to be visible | |
229 | or not, according to the args given when the construct is started. | |
230 | The construct is visible if the `exit_label' field is non-null. | |
231 | In that case, the value should be a CODE_LABEL rtx. */ | |
232 | ||
233 | struct nesting | |
234 | { | |
235 | struct nesting *all; | |
236 | struct nesting *next; | |
237 | int depth; | |
238 | rtx exit_label; | |
239 | union | |
240 | { | |
241 | /* For conds (if-then and if-then-else statements). */ | |
242 | struct | |
243 | { | |
244 | /* Label on the else-part, if any, else 0. */ | |
245 | rtx else_label; | |
246 | /* Label at the end of the whole construct. */ | |
247 | rtx after_label; | |
248 | } cond; | |
249 | /* For loops. */ | |
250 | struct | |
251 | { | |
252 | /* Label at the top of the loop; place to loop back to. */ | |
253 | rtx start_label; | |
254 | /* Label at the end of the whole construct. */ | |
255 | rtx end_label; | |
256 | /* Label for `continue' statement to jump to; | |
257 | this is in front of the stepper of the loop. */ | |
258 | rtx continue_label; | |
259 | } loop; | |
260 | /* For variable binding contours. */ | |
261 | struct | |
262 | { | |
263 | /* Sequence number of this binding contour within the function, | |
264 | in order of entry. */ | |
265 | int block_start_count; | |
266 | /* Nonzero => value to restore stack to on exit. */ | |
267 | rtx stack_level; | |
268 | /* The NOTE that starts this contour. | |
269 | Used by expand_goto to check whether the destination | |
270 | is within each contour or not. */ | |
271 | rtx first_insn; | |
272 | /* Innermost containing binding contour that has a stack level. */ | |
273 | struct nesting *innermost_stack_block; | |
274 | /* List of cleanups to be run on exit from this contour. | |
275 | This is a list of expressions to be evaluated. | |
276 | The TREE_PURPOSE of each link is the ..._DECL node | |
277 | which the cleanup pertains to. */ | |
278 | tree cleanups; | |
279 | /* List of cleanup-lists of blocks containing this block, | |
280 | as they were at the locus where this block appears. | |
281 | There is an element for each containing block, | |
282 | ordered innermost containing block first. | |
283 | The element's TREE_VALUE is the cleanup-list of that block, | |
284 | which may be null. */ | |
285 | tree outer_cleanups; | |
286 | /* Chain of labels defined inside this binding contour. | |
287 | For contours that have stack levels or cleanups. */ | |
288 | struct label_chain *label_chain; | |
289 | } block; | |
290 | /* For switch (C) or case (Pascal) statements, | |
291 | and also for dummies (see `expand_start_case_dummy'). */ | |
292 | struct | |
293 | { | |
294 | /* The insn after which the case dispatch should finally | |
295 | be emitted. Zero for a dummy. */ | |
296 | rtx start; | |
297 | /* A list of case labels, kept in ascending order by value | |
298 | as the list is built. | |
299 | During expand_end_case, this list may be rearranged into a | |
300 | nearly balanced binary tree. */ | |
301 | struct case_node *case_list; | |
302 | /* Label to jump to if no case matches. */ | |
303 | tree default_label; | |
304 | /* The expression to be dispatched on. */ | |
305 | tree index_expr; | |
306 | /* Type that INDEX_EXPR should be converted to. */ | |
307 | tree nominal_type; | |
308 | /* Number of range exprs in case statement. */ | |
309 | short num_ranges; | |
310 | } case_stmt; | |
311 | } data; | |
312 | }; | |
313 | ||
314 | /* Chain of all pending binding contours. */ | |
315 | struct nesting *block_stack; | |
316 | ||
317 | /* Chain of all pending binding contours that restore stack levels | |
318 | or have cleanups. */ | |
319 | struct nesting *stack_block_stack; | |
320 | ||
321 | /* Chain of all pending conditional statements. */ | |
322 | struct nesting *cond_stack; | |
323 | ||
324 | /* Chain of all pending loops. */ | |
325 | struct nesting *loop_stack; | |
326 | ||
327 | /* Chain of all pending case or switch statements. */ | |
328 | struct nesting *case_stack; | |
329 | ||
330 | /* Separate chain including all of the above, | |
331 | chained through the `all' field. */ | |
332 | struct nesting *nesting_stack; | |
333 | ||
334 | /* Number of entries on nesting_stack now. */ | |
335 | int nesting_depth; | |
336 | ||
337 | /* Pop one of the sub-stacks, such as `loop_stack' or `cond_stack'; | |
338 | and pop off `nesting_stack' down to the same level. */ | |
339 | ||
340 | #define POPSTACK(STACK) \ | |
341 | do { int initial_depth = nesting_stack->depth; \ | |
342 | do { struct nesting *this = STACK; \ | |
343 | STACK = this->next; \ | |
344 | nesting_stack = this->all; \ | |
345 | nesting_depth = this->depth; \ | |
346 | free (this); } \ | |
347 | while (nesting_depth > initial_depth); } while (0) | |
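
/* For example, `POPSTACK (loop_stack);' at the end of a loop frees the
   loop's `struct nesting' and pops `nesting_stack' back below that
   entry (a sketch of the normal, properly nested case).  */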
\f
static int warn_if_unused_value ();
static void expand_goto_internal ();
static int expand_fixup ();
static void fixup_gotos ();
static void expand_cleanups ();
static void fixup_cleanups ();
static void expand_null_return_1 ();
static int tail_recursion_args ();
static void fixup_stack_slots ();
static rtx fixup_stack_1 ();
static rtx fixup_memory_subreg ();
static rtx walk_fixup_memory_subreg ();
static void fixup_var_refs ();
static void fixup_var_refs_insns ();
static rtx fixup_var_refs_1 ();
static rtx parm_stack_loc ();
static void optimize_bit_field ();
static void do_jump_if_equal ();
\f
/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
          || prev_real_insn (last_insn) == 0))
    emit_insn (gen_nop ());
}
\f
/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

static rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
\f
/* Handle goto statements and the labels that they can go to.  */

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
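
/* Sketch of a case that needs a fixup (assuming the block saves a
   stack level, e.g. because it contains a variable-sized object):

	{ int v[n]; ... goto out; ... }
	out: ;

   The goto is expanded before `out' is defined, so expand_fixup records
   it here; when the block is exited, fixup_gotos notes the stack level
   to restore, and once the label is seen the restore insns (and any
   cleanups) are inserted after `before_jump'.  */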
445 | ||
446 | /* Within any binding contour that must restore a stack level, | |
447 | all labels are recorded with a chain of these structures. */ | |
448 | ||
449 | struct label_chain | |
450 | { | |
451 | /* Points to following fixup. */ | |
452 | struct label_chain *next; | |
453 | tree label; | |
454 | }; | |
455 | ||
456 | /* Specify the location in the RTL code of a label BODY, | |
457 | which is a LABEL_DECL tree node. | |
458 | ||
459 | This is used for the kind of label that the user can jump to with a | |
460 | goto statement, and for alternatives of a switch or case statement. | |
461 | RTL labels generated for loops and conditionals don't go through here; | |
462 | they are generated directly at the RTL level, by other functions below. | |
463 | ||
464 | Note that this has nothing to do with defining label *names*. | |
465 | Languages vary in how they do that and what that even means. */ | |
466 | ||
467 | void | |
468 | expand_label (body) | |
469 | tree body; | |
470 | { | |
471 | struct label_chain *p; | |
472 | ||
473 | do_pending_stack_adjust (); | |
474 | emit_label (label_rtx (body)); | |
475 | ||
476 | if (stack_block_stack != 0) | |
477 | { | |
478 | p = (struct label_chain *) oballoc (sizeof (struct label_chain)); | |
479 | p->next = stack_block_stack->data.block.label_chain; | |
480 | stack_block_stack->data.block.label_chain = p; | |
481 | p->label = body; | |
482 | } | |
483 | } | |
484 | ||
485 | /* Generate RTL code for a `goto' statement with target label BODY. | |
486 | BODY should be a LABEL_DECL tree node that was or will later be | |
487 | defined with `expand_label'. */ | |
488 | ||
489 | void | |
490 | expand_goto (body) | |
491 | tree body; | |
492 | { | |
493 | expand_goto_internal (body, label_rtx (body), 0); | |
494 | } | |
495 | ||
496 | /* Generate RTL code for a `goto' statement with target label BODY. | |
497 | LABEL should be a LABEL_REF. | |
498 | LAST_INSN, if non-0, is the rtx we should consider as the last | |
499 | insn emitted (for the purposes of cleaning up a return). */ | |
500 | ||
501 | static void | |
502 | expand_goto_internal (body, label, last_insn) | |
503 | tree body; | |
504 | rtx label; | |
505 | rtx last_insn; | |
506 | { | |
507 | struct nesting *block; | |
508 | rtx stack_level = 0; | |
509 | ||
510 | if (GET_CODE (label) != CODE_LABEL) | |
511 | abort (); | |
512 | ||
513 | /* If label has already been defined, we can tell now | |
514 | whether and how we must alter the stack level. */ | |
515 | ||
516 | if (PREV_INSN (label) != 0) | |
517 | { | |
518 | /* Find the innermost pending block that contains the label. | |
519 | (Check containment by comparing insn-uids.) | |
520 | Then restore the outermost stack level within that block, | |
521 | and do cleanups of all blocks contained in it. */ | |
522 | for (block = block_stack; block; block = block->next) | |
523 | { | |
524 | if (INSN_UID (block->data.block.first_insn) < INSN_UID (label)) | |
525 | break; | |
526 | if (block->data.block.stack_level != 0) | |
527 | stack_level = block->data.block.stack_level; | |
528 | /* Execute the cleanups for blocks we are exiting. */ | |
529 | if (block->data.block.cleanups != 0) | |
530 | expand_cleanups (block->data.block.cleanups, 0); | |
531 | } | |
532 | ||
533 | if (stack_level) | |
534 | emit_move_insn (stack_pointer_rtx, stack_level); | |
535 | ||
536 | if (body != 0 && TREE_PACKED (body)) | |
537 | error ("jump to `%s' invalidly jumps into binding contour", | |
538 | IDENTIFIER_POINTER (DECL_NAME (body))); | |
539 | } | |
540 | /* Label not yet defined: may need to put this goto | |
541 | on the fixup list. */ | |
542 | else if (! expand_fixup (body, label, last_insn)) | |
543 | { | |
544 | /* No fixup needed. Record that the label is the target | |
545 | of at least one goto that has no fixup. */ | |
546 | if (body != 0) | |
547 | TREE_ADDRESSABLE (body) = 1; | |
548 | } | |
549 | ||
550 | emit_jump (label); | |
551 | } | |
552 | \f | |
553 | /* Generate if necessary a fixup for a goto | |
554 | whose target label in tree structure (if any) is TREE_LABEL | |
555 | and whose target in rtl is RTL_LABEL. | |
556 | ||
557 | If LAST_INSN is nonzero, we pretend that the jump appears | |
558 | after insn LAST_INSN instead of at the current point in the insn stream. | |
559 | ||
560 | The fixup will be used later to insert insns at this point | |
561 | to restore the stack level as appropriate for the target label. | |
562 | ||
563 | Value is nonzero if a fixup is made. */ | |
564 | ||
565 | static int | |
566 | expand_fixup (tree_label, rtl_label, last_insn) | |
567 | tree tree_label; | |
568 | rtx rtl_label; | |
569 | rtx last_insn; | |
570 | { | |
571 | struct nesting *block, *end_block; | |
572 | ||
573 | /* See if we can recognize which block the label will be output in. | |
574 | This is possible in some very common cases. | |
575 | If we succeed, set END_BLOCK to that block. | |
576 | Otherwise, set it to 0. */ | |
577 | ||
578 | if (cond_stack | |
579 | && (rtl_label == cond_stack->data.cond.else_label | |
580 | || rtl_label == cond_stack->data.cond.after_label)) | |
581 | end_block = cond_stack; | |
582 | /* If we are in a loop, recognize certain labels which | |
583 | are likely targets. This reduces the number of fixups | |
584 | we need to create. */ | |
585 | else if (loop_stack | |
586 | && (rtl_label == loop_stack->data.loop.start_label | |
587 | || rtl_label == loop_stack->data.loop.end_label | |
588 | || rtl_label == loop_stack->data.loop.continue_label)) | |
589 | end_block = loop_stack; | |
590 | else | |
591 | end_block = 0; | |
592 | ||
593 | /* Now set END_BLOCK to the binding level to which we will return. */ | |
594 | ||
595 | if (end_block) | |
596 | { | |
597 | struct nesting *next_block = end_block->all; | |
598 | block = block_stack; | |
599 | ||
600 | /* First see if the END_BLOCK is inside the innermost binding level. | |
601 | If so, then no cleanups or stack levels are relevant. */ | |
602 | while (next_block && next_block != block) | |
603 | next_block = next_block->all; | |
604 | ||
605 | if (next_block) | |
606 | return 0; | |
607 | ||
608 | /* Otherwise, set END_BLOCK to the innermost binding level | |
609 | which is outside the relevant control-structure nesting. */ | |
610 | next_block = block_stack->next; | |
611 | for (block = block_stack; block != end_block; block = block->all) | |
612 | if (block == next_block) | |
613 | next_block = next_block->next; | |
614 | end_block = next_block; | |
615 | } | |
616 | ||
617 | /* Does any containing block have a stack level or cleanups? | |
618 | If not, no fixup is needed, and that is the normal case | |
619 | (the only case, for standard C). */ | |
620 | for (block = block_stack; block != end_block; block = block->next) | |
621 | if (block->data.block.stack_level != 0 | |
622 | || block->data.block.cleanups != 0) | |
623 | break; | |
624 | ||
625 | if (block != end_block) | |
626 | { | |
627 | /* Ok, a fixup is needed. Add a fixup to the list of such. */ | |
628 | struct goto_fixup *fixup | |
629 | = (struct goto_fixup *) oballoc (sizeof (struct goto_fixup)); | |
630 | /* In case an old stack level is restored, make sure that comes | |
631 | after any pending stack adjust. */ | |
632 | do_pending_stack_adjust (); | |
633 | fixup->before_jump = last_insn ? last_insn : get_last_insn (); | |
634 | fixup->target = tree_label; | |
635 | fixup->target_rtl = rtl_label; | |
636 | fixup->block_start_count = block_start_count; | |
637 | fixup->stack_level = 0; | |
638 | fixup->cleanup_list_list | |
639 | = (block->data.block.outer_cleanups || block->data.block.cleanups | |
640 | ? tree_cons (0, block->data.block.cleanups, | |
641 | block->data.block.outer_cleanups) | |
642 | : 0); | |
643 | fixup->next = goto_fixup_chain; | |
644 | goto_fixup_chain = fixup; | |
645 | } | |
646 | ||
647 | return block != 0; | |
648 | } | |
649 | ||
650 | /* When exiting a binding contour, process all pending gotos requiring fixups. | |
651 | THISBLOCK is the structure that describes the block being exited. | |
652 | STACK_LEVEL is the rtx for the stack level to restore exiting this contour. | |
653 | CLEANUP_LIST is a list of expressions to evaluate on exiting this contour. | |
654 | FIRST_INSN is the insn that began this contour. | |
655 | ||
656 | Gotos that jump out of this contour must restore the | |
657 | stack level and do the cleanups before actually jumping. | |
658 | ||
   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */
662 | ||
663 | static void | |
664 | fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in) | |
665 | struct nesting *thisblock; | |
666 | rtx stack_level; | |
667 | tree cleanup_list; | |
668 | rtx first_insn; | |
669 | int dont_jump_in; | |
670 | { | |
671 | register struct goto_fixup *f, *prev; | |
672 | ||
673 | /* F is the fixup we are considering; PREV is the previous one. */ | |
674 | ||
675 | for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next) | |
676 | { | |
677 | /* Test for a fixup that is inactive because it is already handled. */ | |
678 | if (f->before_jump == 0) | |
679 | { | |
680 | /* Delete inactive fixup from the chain, if that is easy to do. */ | |
681 | if (prev != 0) | |
682 | prev->next = f->next; | |
683 | } | |
684 | /* Has this fixup's target label been defined? | |
685 | If so, we can finalize it. */ | |
686 | else if (PREV_INSN (f->target_rtl) != 0) | |
687 | { | |
688 | /* If this fixup jumped into this contour from before the beginning | |
689 | of this contour, report an error. */ | |
690 | /* ??? Bug: this does not detect jumping in through intermediate | |
691 | blocks that have stack levels or cleanups. | |
692 | It detects only a problem with the innermost block | |
693 | around the label. */ | |
694 | if (f->target != 0 | |
695 | && (dont_jump_in || stack_level || cleanup_list) | |
696 | && INSN_UID (first_insn) > INSN_UID (f->before_jump) | |
697 | && ! TREE_ADDRESSABLE (f->target)) | |
698 | { | |
699 | error_with_decl (f->target, | |
700 | "label `%s' used before containing binding contour"); | |
701 | /* Prevent multiple errors for one label. */ | |
702 | TREE_ADDRESSABLE (f->target) = 1; | |
703 | } | |
704 | ||
705 | /* Execute cleanups for blocks this jump exits. */ | |
706 | if (f->cleanup_list_list) | |
707 | { | |
708 | tree lists; | |
709 | for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists)) | |
710 | /* Marked elements correspond to blocks that have been closed. | |
711 | Do their cleanups. */ | |
712 | if (TREE_ADDRESSABLE (lists) | |
713 | && TREE_VALUE (lists) != 0) | |
714 | fixup_cleanups (TREE_VALUE (lists), &f->before_jump); | |
715 | } | |
716 | ||
717 | /* Restore stack level for the biggest contour that this | |
718 | jump jumps out of. */ | |
719 | if (f->stack_level) | |
720 | emit_insn_after (gen_move_insn (stack_pointer_rtx, f->stack_level), | |
721 | f->before_jump); | |
722 | f->before_jump = 0; | |
723 | } | |
724 | /* Label has still not appeared. If we are exiting a block with | |
725 | a stack level to restore, that started before the fixup, | |
726 | mark this stack level as needing restoration | |
727 | when the fixup is later finalized. | |
728 | Also mark the cleanup_list_list element for F | |
729 | that corresponds to this block, so that ultimately | |
730 | this block's cleanups will be executed by the code above. */ | |
731 | /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, | |
732 | it means the label is undefined. That's erroneous, but possible. */ | |
733 | else if (thisblock != 0 | |
734 | && (thisblock->data.block.block_start_count | |
735 | < f->block_start_count)) | |
736 | { | |
737 | tree lists = f->cleanup_list_list; | |
738 | for (; lists; lists = TREE_CHAIN (lists)) | |
739 | /* If the following elt. corresponds to our containing block | |
740 | then the elt. must be for this block. */ | |
741 | if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups) | |
742 | TREE_ADDRESSABLE (lists) = 1; | |
743 | ||
744 | if (stack_level) | |
745 | f->stack_level = stack_level; | |
746 | } | |
747 | } | |
748 | } | |
749 | \f | |
750 | /* Generate RTL for an asm statement (explicit assembler code). | |
751 | BODY is a STRING_CST node containing the assembler code text. */ | |
752 | ||
753 | void | |
754 | expand_asm (body) | |
755 | tree body; | |
756 | { | |
757 | emit_insn (gen_rtx (ASM_INPUT, VOIDmode, | |
758 | TREE_STRING_POINTER (body))); | |
759 | last_expr_type = 0; | |
760 | } | |
761 | ||
762 | /* Generate RTL for an asm statement with arguments. | |
763 | STRING is the instruction template. | |
764 | OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs. | |
765 | Each output or input has an expression in the TREE_VALUE and | |
766 | a constraint-string in the TREE_PURPOSE. | |
767 | CLOBBERS is a list of STRING_CST nodes each naming a hard register | |
768 | that is clobbered by this insn. | |
769 | ||
770 | Not all kinds of lvalue that may appear in OUTPUTS can be stored directly. | |
771 | Some elements of OUTPUTS may be replaced with trees representing temporary | |
772 | values. The caller should copy those temporary values to the originally | |
773 | specified lvalues. | |
774 | ||
775 | VOL nonzero means the insn is volatile; don't optimize it. */ | |
776 | ||
void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int nclobbers = list_length (clobbers);
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  /* The insn we have emitted.  */
  rtx insn;

  last_expr_type = 0;

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree val1;
      int j;
      int found_equal;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
        return;

      /* Make sure constraint has `=' and does not have `+'.  */

      found_equal = 0;
      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        {
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
            {
              error ("output operand constraint contains `+'");
              return;
            }
          if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '=')
            found_equal = 1;
        }
      if (! found_equal)
        {
          error ("output operand constraint lacks `='");
          return;
        }

      /* If an output operand is not a variable or indirect ref,
         or a part of one,
         create a SAVE_EXPR which is a pseudo-reg
         to act as an intermediate temporary.
         Make the asm insn write into that, then copy it to
         the real output operand.  */

      val1 = val;
      while (TREE_CODE (val1) == COMPONENT_REF
             || TREE_CODE (val1) == ARRAY_REF)
        val1 = TREE_OPERAND (val1, 0);

      if (TREE_CODE (val1) != VAR_DECL
          && TREE_CODE (val1) != PARM_DECL
          && TREE_CODE (val1) != INDIRECT_REF)
        {
          rtx reg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (val)));
          /* `build' isn't safe; it really expects args to be trees.  */
          tree t = build_nt (SAVE_EXPR, val, reg);

          if (GET_MODE (reg) == BLKmode)
            abort ();

          save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, reg, save_expr_regs);
          TREE_VALUE (tail) = t;
          TREE_TYPE (t) = TREE_TYPE (val);
        }
      output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
    }

  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx (ASM_OPERANDS, VOIDmode,
                  TREE_STRING_POINTER (string), "", 0, argvec, constraints,
                  filename, line);
  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;

      /* If there's an erroneous arg, emit no insn,
         because the ASM_INPUT would get VOIDmode
         and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
        return;
      if (TREE_PURPOSE (tail) == NULL_TREE)
        {
          error ("hard register `%s' listed as input operand to `asm'",
                 TREE_STRING_POINTER (TREE_VALUE (tail)));
          return;
        }

      /* Make sure constraint has neither `=' nor `+'.  */

      for (j = 0; j < TREE_STRING_LENGTH (TREE_PURPOSE (tail)); j++)
        if (TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '='
            || TREE_STRING_POINTER (TREE_PURPOSE (tail))[j] == '+')
          {
            error ("input operand constraint contains `%c'",
                   TREE_STRING_POINTER (TREE_PURPOSE (tail))[j]);
            return;
          }

      XVECEXP (body, 3, i)      /* argvec */
        = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
      XVECEXP (body, 4, i)      /* constraints */
        = gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
                   TREE_STRING_POINTER (TREE_PURPOSE (tail)));
      i++;
    }

  /* Protect all the operands from the queue,
     now that they have all been evaluated.  */

  for (i = 0; i < ninputs; i++)
    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);

  for (i = 0; i < noutputs; i++)
    output_rtx[i] = protect_from_queue (output_rtx[i], 1);

  /* Now, for each output, construct an rtx
     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
                               ARGVEC CONSTRAINTS))
     If there is more than one, put them inside a PARALLEL.  */

  if (noutputs == 1 && nclobbers == 0)
    {
      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
      insn = emit_insn (gen_rtx (SET, VOIDmode, output_rtx[0], body));
    }
  else if (noutputs == 0 && nclobbers == 0)
    {
      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
      insn = emit_insn (body);
    }
  else
    {
      rtx obody = body;
      int num = noutputs;
      if (num == 0) num = 1;
      body = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (num + nclobbers));

      /* For each output operand, store a SET.  */

      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
        {
          XVECEXP (body, 0, i)
            = gen_rtx (SET, VOIDmode,
                       output_rtx[i],
                       gen_rtx (ASM_OPERANDS, VOIDmode,
                                TREE_STRING_POINTER (string),
                                TREE_STRING_POINTER (TREE_PURPOSE (tail)),
                                i, argvec, constraints,
                                filename, line));
          MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
        }

      /* If there are no outputs (but there are some clobbers)
         store the bare ASM_OPERANDS into the PARALLEL.  */

      if (i == 0)
        XVECEXP (body, 0, i++) = obody;

      /* Store (clobber REG) for each clobbered register specified.  */

      for (tail = clobbers; tail; tail = TREE_CHAIN (tail), i++)
        {
          int j;
          char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
          extern char *reg_names[];

          for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
            if (!strcmp (regname, reg_names[j]))
              break;

          if (j == FIRST_PSEUDO_REGISTER)
            {
              error ("unknown register name `%s' in `asm'", regname);
              return;
            }

          /* Use QImode since that's guaranteed to clobber just one reg.  */
          XVECEXP (body, 0, i)
            = gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, QImode, j));
        }

      insn = emit_insn (body);
    }

  last_expr_type = 0;
}

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Generate RTL to evaluate the expression EXP
   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */
1001 | ||
1002 | void | |
1003 | expand_expr_stmt (exp) | |
1004 | tree exp; | |
1005 | { | |
1006 | /* If -W, warn about statements with no side effects, | |
1007 | except inside a ({...}) where they may be useful. */ | |
1008 | if (expr_stmts_for_value == 0 && exp != error_mark_node) | |
1009 | { | |
1010 | if (! TREE_VOLATILE (exp) && (extra_warnings || warn_unused)) | |
1011 | warning_with_file_and_line (emit_filename, emit_lineno, | |
1012 | "statement with no effect"); | |
1013 | else if (warn_unused) | |
1014 | warn_if_unused_value (exp); | |
1015 | } | |
1016 | last_expr_type = TREE_TYPE (exp); | |
1017 | if (! flag_syntax_only) | |
1018 | last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx, | |
1019 | VOIDmode, 0); | |
1020 | emit_queue (); | |
1021 | } | |
1022 | ||
1023 | /* Warn if EXP contains any computations whose results are not used. | |
1024 | Return 1 if a warning is printed; 0 otherwise. */ | |
1025 | ||
1026 | static int | |
1027 | warn_if_unused_value (exp) | |
1028 | tree exp; | |
1029 | { | |
1030 | switch (TREE_CODE (exp)) | |
1031 | { | |
1032 | case PREINCREMENT_EXPR: | |
1033 | case POSTINCREMENT_EXPR: | |
1034 | case PREDECREMENT_EXPR: | |
1035 | case POSTDECREMENT_EXPR: | |
1036 | case MODIFY_EXPR: | |
1037 | case INIT_EXPR: | |
1038 | case NEW_EXPR: | |
1039 | case DELETE_EXPR: | |
1040 | case PUSH_EXPR: | |
1041 | case POP_EXPR: | |
1042 | case CALL_EXPR: | |
1043 | case METHOD_CALL_EXPR: | |
1044 | case RTL_EXPR: | |
1045 | case WRAPPER_EXPR: | |
1046 | case ANTI_WRAPPER_EXPR: | |
1047 | case WITH_CLEANUP_EXPR: | |
1048 | /* We don't warn about COND_EXPR because it may be a useful | |
1049 | construct if either arm contains a side effect. */ | |
1050 | case COND_EXPR: | |
1051 | return 0; | |
1052 | ||
1053 | case TRUTH_ORIF_EXPR: | |
1054 | case TRUTH_ANDIF_EXPR: | |
1055 | /* In && or ||, warn if 2nd operand has no side effect. */ | |
1056 | return warn_if_unused_value (TREE_OPERAND (exp, 1)); | |
1057 | ||
1058 | case COMPOUND_EXPR: | |
1059 | if (warn_if_unused_value (TREE_OPERAND (exp, 0))) | |
1060 | return 1; | |
1061 | return warn_if_unused_value (TREE_OPERAND (exp, 1)); | |
1062 | ||
1063 | case NOP_EXPR: | |
1064 | case CONVERT_EXPR: | |
1065 | /* Don't warn about values cast to void. */ | |
1066 | if (TREE_TYPE (exp) == void_type_node) | |
1067 | return 0; | |
1068 | /* Assignment to a cast results in a cast of a modify. | |
1069 | Don't complain about that. */ | |
1070 | if (TREE_CODE (TREE_OPERAND (exp, 0)) == MODIFY_EXPR) | |
1071 | return 0; | |
1072 | ||
1073 | default: | |
1074 | warning_with_file_and_line (emit_filename, emit_lineno, | |
1075 | "value computed is not used"); | |
1076 | return 1; | |
1077 | } | |
1078 | } | |
1079 | ||
1080 | /* Clear out the memory of the last expression evaluated. */ | |
1081 | ||
1082 | void | |
1083 | clear_last_expr () | |
1084 | { | |
1085 | last_expr_type = 0; | |
1086 | } | |
1087 | ||
1088 | /* Begin a statement which will return a value. | |
1089 | Return the RTL_EXPR for this statement expr. | |
1090 | The caller must save that value and pass it to expand_end_stmt_expr. */ | |
1091 | ||
1092 | tree | |
1093 | expand_start_stmt_expr () | |
1094 | { | |
1095 | rtx save = start_sequence (); | |
1096 | /* Make the RTL_EXPR node temporary, not momentary, | |
1097 | so that rtl_expr_chain doesn't become garbage. */ | |
1098 | int momentary = suspend_momentary (); | |
1099 | tree t = make_node (RTL_EXPR); | |
1100 | resume_momentary (momentary); | |
1101 | RTL_EXPR_RTL (t) = save; | |
1102 | NO_DEFER_POP; | |
1103 | expr_stmts_for_value++; | |
1104 | return t; | |
1105 | } | |
1106 | ||
1107 | /* Restore the previous state at the end of a statement that returns a value. | |
1108 | Returns a tree node representing the statement's value and the | |
1109 | insns to compute the value. | |
1110 | ||
1111 | The nodes of that expression have been freed by now, so we cannot use them. | |
1112 | But we don't want to do that anyway; the expression has already been | |
1113 | evaluated and now we just want to use the value. So generate a RTL_EXPR | |
1114 | with the proper type and RTL value. | |
1115 | ||
1116 | If the last substatement was not an expression, | |
1117 | return something with type `void'. */ | |
1118 | ||
1119 | tree | |
1120 | expand_end_stmt_expr (t) | |
1121 | tree t; | |
1122 | { | |
1123 | rtx saved = RTL_EXPR_RTL (t); | |
1124 | ||
1125 | OK_DEFER_POP; | |
1126 | ||
1127 | if (last_expr_type == 0) | |
1128 | { | |
1129 | last_expr_type = void_type_node; | |
1130 | last_expr_value = const0_rtx; | |
1131 | } | |
1132 | TREE_TYPE (t) = last_expr_type; | |
1133 | RTL_EXPR_RTL (t) = last_expr_value; | |
1134 | RTL_EXPR_SEQUENCE (t) = get_insns (); | |
1135 | ||
1136 | rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain); | |
1137 | ||
1138 | end_sequence (saved); | |
1139 | ||
1140 | /* Don't consider deleting this expr or containing exprs at tree level. */ | |
1141 | TREE_VOLATILE (t) = 1; | |
1142 | /* Propagate volatility of the actual RTL expr. */ | |
1143 | TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value); | |
1144 | ||
1145 | last_expr_type = 0; | |
1146 | expr_stmts_for_value--; | |
1147 | ||
1148 | return t; | |
1149 | } | |
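
/* Protocol sketch for a statement expression such as
   `({ int i = foo (); i + 1; })' (illustrative):

	tree t = expand_start_stmt_expr ();
	... expand the statements inside the braces ...
	t = expand_end_stmt_expr (t);

   The RTL_EXPR T then stands for the construct's value, here `i + 1'.  */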
\f
/* Generate RTL for the start of an if-then.  COND is the expression
   whose truth should be tested.

   If EXITFLAG is nonzero, this conditional is visible to
   `exit_something'.  */

void
expand_start_cond (cond, exitflag)
     tree cond;
     int exitflag;
{
  struct nesting *thiscond
    = (struct nesting *) xmalloc (sizeof (struct nesting));

  /* Make an entry on cond_stack for the cond we are entering.  */

  thiscond->next = cond_stack;
  thiscond->all = nesting_stack;
  thiscond->depth = ++nesting_depth;
  thiscond->data.cond.after_label = 0;
  thiscond->data.cond.else_label = gen_label_rtx ();
  thiscond->exit_label = exitflag ? thiscond->data.cond.else_label : 0;
  cond_stack = thiscond;
  nesting_stack = thiscond;

  do_jump (cond, thiscond->data.cond.else_label, NULL);
}
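
/* Thus a C `if (c) A; else B;' statement is expanded by calling,
   in order (sketch):

	expand_start_cond (c, 0);
	... expand A ...
	expand_start_else ();
	... expand B ...
	expand_end_else ();

   while a plain if-then ends with expand_end_cond instead.  */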
1178 | ||
1179 | /* Generate RTL for the end of an if-then with no else-clause. | |
1180 | Pop the record for it off of cond_stack. */ | |
1181 | ||
1182 | void | |
1183 | expand_end_cond () | |
1184 | { | |
1185 | struct nesting *thiscond = cond_stack; | |
1186 | ||
1187 | do_pending_stack_adjust (); | |
1188 | emit_label (thiscond->data.cond.else_label); | |
1189 | ||
1190 | POPSTACK (cond_stack); | |
1191 | last_expr_type = 0; | |
1192 | } | |
1193 | ||
1194 | /* Generate RTL between the then-clause and the else-clause | |
1195 | of an if-then-else. */ | |
1196 | ||
1197 | void | |
1198 | expand_start_else () | |
1199 | { | |
1200 | cond_stack->data.cond.after_label = gen_label_rtx (); | |
1201 | if (cond_stack->exit_label != 0) | |
1202 | cond_stack->exit_label = cond_stack->data.cond.after_label; | |
1203 | emit_jump (cond_stack->data.cond.after_label); | |
1204 | if (cond_stack->data.cond.else_label) | |
1205 | emit_label (cond_stack->data.cond.else_label); | |
1206 | } | |
1207 | ||
1208 | /* Generate RTL for the end of an if-then-else. | |
1209 | Pop the record for it off of cond_stack. */ | |
1210 | ||
1211 | void | |
1212 | expand_end_else () | |
1213 | { | |
1214 | struct nesting *thiscond = cond_stack; | |
1215 | ||
1216 | do_pending_stack_adjust (); | |
1217 | /* Note: a syntax error can cause this to be called | |
1218 | without first calling `expand_start_else'. */ | |
1219 | if (thiscond->data.cond.after_label) | |
1220 | emit_label (thiscond->data.cond.after_label); | |
1221 | ||
1222 | POPSTACK (cond_stack); | |
1223 | last_expr_type = 0; | |
1224 | } | |
1225 | \f | |
1226 | /* Generate RTL for the start of a loop. EXIT_FLAG is nonzero if this | |
1227 | loop should be exited by `exit_something'. This is a loop for which | |
1228 | `expand_continue' will jump to the top of the loop. | |
1229 | ||
1230 | Make an entry on loop_stack to record the labels associated with | |
1231 | this loop. */ | |
1232 | ||
1233 | void | |
1234 | expand_start_loop (exit_flag) | |
1235 | int exit_flag; | |
1236 | { | |
1237 | register struct nesting *thisloop | |
1238 | = (struct nesting *) xmalloc (sizeof (struct nesting)); | |
1239 | ||
1240 | /* Make an entry on loop_stack for the loop we are entering. */ | |
1241 | ||
1242 | thisloop->next = loop_stack; | |
1243 | thisloop->all = nesting_stack; | |
1244 | thisloop->depth = ++nesting_depth; | |
1245 | thisloop->data.loop.start_label = gen_label_rtx (); | |
1246 | thisloop->data.loop.end_label = gen_label_rtx (); | |
1247 | thisloop->data.loop.continue_label = thisloop->data.loop.start_label; | |
1248 | thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0; | |
1249 | loop_stack = thisloop; | |
1250 | nesting_stack = thisloop; | |
1251 | ||
1252 | do_pending_stack_adjust (); | |
1253 | emit_queue (); | |
1254 | emit_note (0, NOTE_INSN_LOOP_BEG); | |
1255 | emit_label (thisloop->data.loop.start_label); | |
1256 | } | |
1257 | ||
1258 | /* Like expand_start_loop but for a loop where the continuation point | |
1259 | (for expand_continue_loop) will be specified explicitly. */ | |
1260 | ||
1261 | void | |
1262 | expand_start_loop_continue_elsewhere (exit_flag) | |
1263 | int exit_flag; | |
1264 | { | |
1265 | expand_start_loop (exit_flag); | |
1266 | loop_stack->data.loop.continue_label = gen_label_rtx (); | |
1267 | } | |
1268 | ||
1269 | /* Specify the continuation point for a loop started with | |
1270 | expand_start_loop_continue_elsewhere. | |
1271 | Use this at the point in the code to which a continue statement | |
1272 | should jump. */ | |
1273 | ||
1274 | void | |
1275 | expand_loop_continue_here () | |
1276 | { | |
1277 | do_pending_stack_adjust (); | |
1278 | emit_note (0, NOTE_INSN_LOOP_CONT); | |
1279 | emit_label (loop_stack->data.loop.continue_label); | |
1280 | } | |
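
/* A `for (; COND; STEP) BODY' loop might thus be expanded as (sketch):

	expand_start_loop_continue_elsewhere (1);
	... expand_exit_loop_if_false (COND) ...
	... expand BODY ...
	expand_loop_continue_here ();
	... expand STEP ...
	expand_end_loop ();

   so that `continue' jumps to the STEP code rather than to the test.  */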
1281 | ||
1282 | /* Finish a loop. Generate a jump back to the top and the loop-exit label. | |
1283 | Pop the block off of loop_stack. */ | |
1284 | ||
1285 | void | |
1286 | expand_end_loop () | |
1287 | { | |
1288 | register rtx insn = get_last_insn (); | |
1289 | register rtx start_label = loop_stack->data.loop.start_label; | |
1290 | ||
1291 | do_pending_stack_adjust (); | |
1292 | ||
1293 | /* If optimizing, perhaps reorder the loop. If the loop | |
1294 | starts with a conditional exit, roll that to the end | |
1295 | where it will optimize together with the jump back. */ | |
1296 | if (optimize | |
1297 | && | |
1298 | ! (GET_CODE (insn) == JUMP_INSN | |
1299 | && GET_CODE (PATTERN (insn)) == SET | |
1300 | && SET_DEST (PATTERN (insn)) == pc_rtx | |
1301 | && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)) | |
1302 | { | |
1303 | /* Scan insns from the top of the loop looking for a qualified | |
1304 | conditional exit. */ | |
1305 | for (insn = loop_stack->data.loop.start_label; insn; insn= NEXT_INSN (insn)) | |
1306 | if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == SET | |
1307 | && SET_DEST (PATTERN (insn)) == pc_rtx | |
1308 | && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE | |
1309 | && | |
1310 | ((GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 1)) == LABEL_REF | |
1311 | && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 1), 0) | |
1312 | == loop_stack->data.loop.end_label)) | |
1313 | || | |
1314 | (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 2)) == LABEL_REF | |
1315 | && (XEXP (XEXP (SET_SRC (PATTERN (insn)), 2), 0) | |
1316 | == loop_stack->data.loop.end_label)))) | |
1317 | break; | |
1318 | if (insn != 0) | |
1319 | { | |
1320 | /* We found one. Move everything from there up | |
1321 | to the end of the loop, and add a jump into the loop | |
1322 | to jump to there. */ | |
1323 | register rtx newstart_label = gen_label_rtx (); | |
1324 | ||
1325 | emit_label_after (newstart_label, PREV_INSN (start_label)); | |
1326 | reorder_insns (start_label, insn, get_last_insn ()); | |
1327 | emit_jump_insn_after (gen_jump (start_label), PREV_INSN (newstart_label)); | |
1328 | emit_barrier_after (PREV_INSN (newstart_label)); | |
1329 | start_label = newstart_label; | |
1330 | } | |
1331 | } | |
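
  /* The effect, pictorially (an illustrative sketch): a loop that would
     otherwise end up laid out as

		start:	if (cond) goto end;
			BODY
			goto start;
		end:

     instead ends up as

			goto start;
		newstart: BODY
		start:	if (cond) goto end;
			goto newstart;
		end:

     so that jump optimization can combine the conditional exit with the
     jump back, leaving one conditional branch per iteration.  */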
1332 | ||
1333 | emit_jump (start_label); | |
1334 | emit_note (0, NOTE_INSN_LOOP_END); | |
1335 | emit_label (loop_stack->data.loop.end_label); | |
1336 | ||
1337 | POPSTACK (loop_stack); | |
1338 | ||
1339 | last_expr_type = 0; | |
1340 | } | |
1341 | ||
1342 | /* Generate a jump to the current loop's continue-point. | |
1343 | This is usually the top of the loop, but may be specified | |
1344 | explicitly elsewhere. If not currently inside a loop, | |
1345 | return 0 and do nothing; caller will print an error message. */ | |
1346 | ||
1347 | int | |
1348 | expand_continue_loop () | |
1349 | { | |
1350 | last_expr_type = 0; | |
1351 | if (loop_stack == 0) | |
1352 | return 0; | |
1353 | expand_goto_internal (0, loop_stack->data.loop.continue_label, 0); | |
1354 | return 1; | |
1355 | } | |
1356 | ||
1357 | /* Generate a jump to exit the current loop. If not currently inside a loop, | |
1358 | return 0 and do nothing; caller will print an error message. */ | |
1359 | ||
1360 | int | |
1361 | expand_exit_loop () | |
1362 | { | |
1363 | last_expr_type = 0; | |
1364 | if (loop_stack == 0) | |
1365 | return 0; | |
1366 | expand_goto_internal (0, loop_stack->data.loop.end_label, 0); | |
1367 | return 1; | |
1368 | } | |
1369 | ||
1370 | /* Generate a conditional jump to exit the current loop if COND | |
1371 | evaluates to zero. If not currently inside a loop, | |
1372 | return 0 and do nothing; caller will print an error message. */ | |
1373 | ||
1374 | int | |
1375 | expand_exit_loop_if_false (cond) | |
1376 | tree cond; | |
1377 | { | |
1378 | last_expr_type = 0; | |
1379 | if (loop_stack == 0) | |
1380 | return 0; | |
1381 | do_jump (cond, loop_stack->data.loop.end_label, NULL); | |
1382 | return 1; | |
1383 | } | |
1384 | ||
1385 | /* Return non-zero if currently inside a loop. */ | |
1386 | ||
1387 | int | |
1388 | inside_loop () | |
1389 | { | |
1390 | return loop_stack != 0; | |
1391 | } | |
1392 | ||
1393 | /* Generate a jump to exit the current loop, conditional, binding contour | |
1394 | or case statement. Not all such constructs are visible to this function, | |
1395 | only those started with EXIT_FLAG nonzero. Individual languages use | |
1396 | the EXIT_FLAG parameter to control which kinds of constructs you can | |
1397 | exit this way. | |
1398 | ||
1399 | If not currently inside anything that can be exited, | |
1400 | return 0 and do nothing; caller will print an error message. */ | |
1401 | ||
1402 | int | |
1403 | expand_exit_something () | |
1404 | { | |
1405 | struct nesting *n; | |
1406 | last_expr_type = 0; | |
1407 | for (n = nesting_stack; n; n = n->all) | |
1408 | if (n->exit_label != 0) | |
1409 | { | |
1410 | expand_goto_internal (0, n->exit_label, 0); | |
1411 | return 1; | |
1412 | } | |
1413 | ||
1414 | return 0; | |
1415 | } | |
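
/* A language's `break'-like statement can be mapped onto this (sketch):

	if (! expand_exit_something ())
	  error ("break statement not within loop or switch");  */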
1416 | \f | |
1417 | /* Generate RTL to return from the current function, with no value. | |
1418 | (That is, we do not do anything about returning any value.) */ | |
1419 | ||
1420 | void | |
1421 | expand_null_return () | |
1422 | { | |
1423 | struct nesting *block = block_stack; | |
1424 | rtx last_insn = 0; | |
1425 | ||
1426 | /* Does any pending block have cleanups? */ | |
1427 | ||
1428 | while (block && block->data.block.cleanups == 0) | |
1429 | block = block->next; | |
1430 | ||
1431 | /* If yes, use a goto to return, since that runs cleanups. */ | |
1432 | ||
1433 | expand_null_return_1 (last_insn, block != 0); | |
1434 | } | |
1435 | ||
1436 | /* Output a return with no value. If LAST_INSN is nonzero, | |
1437 | pretend that the return takes place after LAST_INSN. | |
1438 | If USE_GOTO is nonzero then don't use a return instruction; | |
1439 | go to the return label instead. This causes any cleanups | |
1440 | of pending blocks to be executed normally. */ | |
1441 | ||
1442 | static void | |
1443 | expand_null_return_1 (last_insn, use_goto) | |
1444 | rtx last_insn; | |
1445 | int use_goto; | |
1446 | { | |
1447 | rtx end_label = cleanup_label ? cleanup_label : return_label; | |
1448 | ||
1449 | clear_pending_stack_adjust (); | |
1450 | do_pending_stack_adjust (); | |
1451 | last_expr_type = 0; | |
1452 | ||
1453 | /* PCC-struct return always uses an epilogue. */ | |
1454 | if (current_function_returns_pcc_struct || use_goto) | |
1455 | { | |
1456 | if (end_label == 0) | |
1457 | end_label = return_label = gen_label_rtx (); | |
1458 | expand_goto_internal (0, end_label, last_insn); | |
1459 | return; | |
1460 | } | |
1461 | ||
1462 | /* Otherwise output a simple return-insn if one is available, | |
1463 | unless it won't do the job. */ | |
1464 | #ifdef HAVE_return | |
1465 | if (HAVE_return && cleanup_label == 0) | |
1466 | { | |
1467 | emit_jump_insn (gen_return ()); | |
1468 | emit_barrier (); | |
1469 | return; | |
1470 | } | |
1471 | #endif | |
1472 | ||
1473 | /* Otherwise jump to the epilogue. */ | |
1474 | expand_goto_internal (0, end_label, last_insn); | |
1475 | } | |
1476 | ||
1477 | /* Generate RTL to evaluate the expression RETVAL and return it | |
1478 | from the current function. */ | |
1479 | ||
1480 | void | |
1481 | expand_return (retval) | |
1482 | tree retval; | |
1483 | { | |
1484 | /* If there are any cleanups to be performed, then they will | |
1485 | be inserted following LAST_INSN. It is desirable | |
1486 | that the last_insn, for such purposes, should be the | |
1487 | last insn before computing the return value. Otherwise, cleanups | |
1488 | which call functions can clobber the return value. */ | |
1489 | /* ??? rms: I think that is erroneous, because in C++ it would | |
1490 | run destructors on variables that might be used in the subsequent | |
1491 | computation of the return value. */ | |
1492 | rtx last_insn = 0; | |
1493 | register rtx val = 0; | |
1494 | register rtx op0; | |
1495 | tree retval_rhs; | |
1496 | int cleanups; | |
1497 | struct nesting *block; | |
1498 | ||
1499 | /* Are any cleanups needed? E.g. C++ destructors to be run? */ | |
1500 | cleanups = 0; | |
1501 | for (block = block_stack; block; block = block->next) | |
1502 | if (block->data.block.cleanups != 0) | |
1503 | { | |
1504 | cleanups = 1; | |
1505 | break; | |
1506 | } | |
1507 | ||
1508 | if (TREE_CODE (retval) == RESULT_DECL) | |
1509 | retval_rhs = retval; | |
1510 | else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR) | |
1511 | && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL) | |
1512 | retval_rhs = TREE_OPERAND (retval, 1); | |
1513 | else if (TREE_TYPE (retval) == void_type_node) | |
1514 | /* Recognize tail-recursive call to void function. */ | |
1515 | retval_rhs = retval; | |
1516 | else | |
1517 | retval_rhs = NULL_TREE; | |
1518 | ||
1519 | /* Only use `last_insn' if there are cleanups which must be run. */ | |
1520 | if (cleanups || cleanup_label != 0) | |
1521 | last_insn = get_last_insn (); | |
1522 | ||
1523 | /* For tail-recursive call to current function, | |
1524 | just jump back to the beginning. | |
1525 | It's unsafe if any auto variable in this function | |
1526 | has its address taken; for simplicity, | |
1527 | require stack frame to be empty. */ | |
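| /* Illustrative example (an assumption, not from the original | |
| source): with -O, a function such as | |
| ||
| int gcd (a, b) int a, b; | |
| { return b ? gcd (b, a % b) : a; } | |
| ||
| keeps its frame empty, so the recursive return can compile into | |
| moves into the formals' registers and a jump back to | |
| tail_recursion_label, with no new stack frame. Whether this | |
| actually triggers depends on the checks below. */ | |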
1528 | if (optimize && retval_rhs != 0 | |
1529 | && frame_offset == STARTING_FRAME_OFFSET | |
1530 | && TREE_CODE (retval_rhs) == CALL_EXPR | |
1531 | && TREE_CODE (TREE_OPERAND (retval_rhs, 0)) == ADDR_EXPR | |
1532 | && TREE_OPERAND (TREE_OPERAND (retval_rhs, 0), 0) == this_function | |
1533 | /* Finish checking validity, and if valid emit code | |
1534 | to set the argument variables for the new call. */ | |
1535 | && tail_recursion_args (TREE_OPERAND (retval_rhs, 1), | |
1536 | DECL_ARGUMENTS (this_function))) | |
1537 | { | |
1538 | if (tail_recursion_label == 0) | |
1539 | { | |
1540 | tail_recursion_label = gen_label_rtx (); | |
1541 | emit_label_after (tail_recursion_label, | |
1542 | tail_recursion_reentry); | |
1543 | } | |
1544 | expand_goto_internal (0, tail_recursion_label, last_insn); | |
1545 | emit_barrier (); | |
1546 | return; | |
1547 | } | |
1548 | #ifdef HAVE_return | |
1549 | /* This optimization is safe if there are local cleanups | |
1550 | because expand_null_return takes care of them. | |
1551 | ??? I think it should also be safe when there is a cleanup label, | |
1552 | because expand_null_return takes care of them, too. | |
1553 | Any reason why not? */ | |
1554 | if (HAVE_return && cleanup_label == 0 | |
1555 | && ! current_function_returns_pcc_struct) | |
1556 | { | |
1557 | /* If this is return x == y; then generate | |
1558 | if (x == y) return 1; else return 0; | |
1559 | if we can do it with explicit return insns. */ | |
1560 | if (retval_rhs) | |
1561 | switch (TREE_CODE (retval_rhs)) | |
1562 | { | |
1563 | case EQ_EXPR: | |
1564 | case NE_EXPR: | |
1565 | case GT_EXPR: | |
1566 | case GE_EXPR: | |
1567 | case LT_EXPR: | |
1568 | case LE_EXPR: | |
1569 | case TRUTH_ANDIF_EXPR: | |
1570 | case TRUTH_ORIF_EXPR: | |
1571 | case TRUTH_AND_EXPR: | |
1572 | case TRUTH_OR_EXPR: | |
1573 | case TRUTH_NOT_EXPR: | |
1574 | op0 = gen_label_rtx (); | |
1575 | val = DECL_RTL (DECL_RESULT (this_function)); | |
1576 | jumpifnot (retval_rhs, op0); | |
1577 | emit_move_insn (val, const1_rtx); | |
1578 | emit_insn (gen_rtx (USE, VOIDmode, val)); | |
1579 | expand_null_return (); | |
1580 | emit_label (op0); | |
1581 | emit_move_insn (val, const0_rtx); | |
1582 | emit_insn (gen_rtx (USE, VOIDmode, val)); | |
1583 | expand_null_return (); | |
1584 | return; | |
1585 | } | |
1586 | } | |
1587 | #endif /* HAVE_return */ | |
1588 | ||
1589 | if (cleanups | |
1590 | && retval_rhs != 0 | |
1591 | && TREE_TYPE (retval_rhs) != void_type_node | |
1592 | && GET_CODE (DECL_RTL (DECL_RESULT (this_function))) == REG) | |
1593 | { | |
1594 | rtx last_insn; | |
1595 | /* Calculate the return value into a pseudo reg. */ | |
1596 | val = expand_expr (retval_rhs, 0, VOIDmode, 0); | |
1597 | emit_queue (); | |
1598 | /* Put the cleanups here. */ | |
1599 | last_insn = get_last_insn (); | |
1600 | /* Copy the value into hard return reg. */ | |
1601 | emit_move_insn (DECL_RTL (DECL_RESULT (this_function)), val); | |
1602 | val = DECL_RTL (DECL_RESULT (this_function)); | |
1603 | ||
1604 | if (GET_CODE (val) == REG) | |
1605 | emit_insn (gen_rtx (USE, VOIDmode, val)); | |
1606 | expand_null_return_1 (last_insn, cleanups); | |
1607 | } | |
1608 | else | |
1609 | { | |
1610 | /* No cleanups or no hard reg used; | |
1611 | calculate value into hard return reg | |
1612 | and let cleanups come after. */ | |
1613 | val = expand_expr (retval, 0, VOIDmode, 0); | |
1614 | emit_queue (); | |
1615 | ||
1616 | val = DECL_RTL (DECL_RESULT (this_function)); | |
1617 | if (val && GET_CODE (val) == REG) | |
1618 | emit_insn (gen_rtx (USE, VOIDmode, val)); | |
1619 | expand_null_return (); | |
1620 | } | |
1621 | } | |
1622 | ||
1623 | /* Return 1 if the end of the generated RTX is not a barrier. | |
1624 | This means code already compiled can drop through. */ | |
1625 | ||
1626 | int | |
1627 | drop_through_at_end_p () | |
1628 | { | |
1629 | rtx insn = get_last_insn (); | |
1630 | while (insn && GET_CODE (insn) == NOTE) | |
1631 | insn = PREV_INSN (insn); | |
1632 | return insn && GET_CODE (insn) != BARRIER; | |
1633 | } | |
1634 | \f | |
1635 | /* Emit code to alter this function's formal parms for a tail-recursive call. | |
1636 | ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs). | |
1637 | FORMALS is the chain of decls of formals. | |
1638 | Return 1 if this can be done; | |
1639 | otherwise return 0 and do not emit any code. */ | |
1640 | ||
1641 | static int | |
1642 | tail_recursion_args (actuals, formals) | |
1643 | tree actuals, formals; | |
1644 | { | |
1645 | register tree a = actuals, f = formals; | |
1646 | register int i; | |
1647 | register rtx *argvec; | |
1648 | ||
1649 | /* Check that number and types of actuals are compatible | |
1650 | with the formals. This is not always true in valid C code. | |
1651 | Also check that no formal needs to be addressable | |
1652 | and that all formals are scalars. */ | |
1653 | ||
1654 | /* Also count the args. */ | |
1655 | ||
1656 | for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++) | |
1657 | { | |
1658 | if (TREE_TYPE (TREE_VALUE (a)) != TREE_TYPE (f)) | |
1659 | return 0; | |
1660 | if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode) | |
1661 | return 0; | |
1662 | } | |
1663 | if (a != 0 || f != 0) | |
1664 | return 0; | |
1665 | ||
1666 | /* Compute all the actuals. */ | |
1667 | ||
1668 | argvec = (rtx *) alloca (i * sizeof (rtx)); | |
1669 | ||
1670 | for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++) | |
1671 | argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0); | |
1672 | ||
1673 | /* Find which actual values refer to current values of previous formals. | |
1674 | Copy each of them now, before any formal is changed. */ | |
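| /* E.g. (illustrative) for `return f (y, x);' inside f (x, y), | |
| the new value of the first formal is the current Y, so Y must | |
| be copied to a fresh register before X is overwritten. */ | |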
1675 | ||
1676 | for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++) | |
1677 | { | |
1678 | int copy = 0; | |
1679 | register int j; | |
1680 | for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++) | |
1681 | if (reg_mentioned_p (DECL_RTL (f), argvec[i])) | |
1682 | { copy = 1; break; } | |
1683 | if (copy) | |
1684 | argvec[i] = copy_to_reg (argvec[i]); | |
1685 | } | |
1686 | ||
1687 | /* Store the values of the actuals into the formals. */ | |
1688 | ||
1689 | for (f = formals, a = actuals, i = 0; f; | |
1690 | f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++) | |
1691 | { | |
1692 | if (DECL_MODE (f) == GET_MODE (argvec[i])) | |
1693 | emit_move_insn (DECL_RTL (f), argvec[i]); | |
1694 | else | |
1695 | convert_move (DECL_RTL (f), argvec[i], | |
1696 | TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)))); | |
1697 | } | |
1698 | ||
1699 | return 1; | |
1700 | } | |
1701 | \f | |
1702 | /* Generate the RTL code for entering a binding contour. | |
1703 | The variables are declared one by one, by calls to `expand_decl'. | |
1704 | ||
1705 | EXIT_FLAG is nonzero if this construct should be visible to | |
1706 | `exit_something'. */ | |
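| /* Illustrative sketch (assumed calling sequence, not quoted from | |
| any front end): a C compound statement `{ int n = f (); ... }' | |
| would be bracketed as | |
| ||
| expand_start_bindings (0); | |
| expand_decl (n_decl, NULL_TREE); | |
| expand_decl_init (n_decl); | |
| ... statements ... | |
| expand_end_bindings (decls, 1, 0); | |
| ||
| where n_decl and decls stand for the front end's VAR_DECL and | |
| decl chain; those two names are hypothetical. */ | |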
1707 | ||
1708 | void | |
1709 | expand_start_bindings (exit_flag) | |
1710 | int exit_flag; | |
1711 | { | |
1712 | struct nesting *thisblock | |
1713 | = (struct nesting *) xmalloc (sizeof (struct nesting)); | |
1714 | ||
1715 | rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG); | |
1716 | ||
1717 | /* Make an entry on block_stack for the block we are entering. */ | |
1718 | ||
1719 | thisblock->next = block_stack; | |
1720 | thisblock->all = nesting_stack; | |
1721 | thisblock->depth = ++nesting_depth; | |
1722 | thisblock->data.block.stack_level = 0; | |
1723 | thisblock->data.block.cleanups = 0; | |
1724 | /* We build this even if the cleanups lists are empty | |
1725 | because we rely on having an element in the chain | |
1726 | for each block that is pending. */ | |
1727 | thisblock->data.block.outer_cleanups | |
1728 | = (block_stack | |
1729 | ? tree_cons (NULL_TREE, block_stack->data.block.cleanups, | |
1730 | block_stack->data.block.outer_cleanups) | |
1731 | : 0); | |
1732 | thisblock->data.block.label_chain = 0; | |
1733 | thisblock->data.block.innermost_stack_block = stack_block_stack; | |
1734 | thisblock->data.block.first_insn = note; | |
1735 | thisblock->data.block.block_start_count = ++block_start_count; | |
1736 | thisblock->exit_label = exit_flag ? gen_label_rtx () : 0; | |
1737 | ||
1738 | block_stack = thisblock; | |
1739 | nesting_stack = thisblock; | |
1740 | } | |
1741 | ||
1742 | /* Output a USE for any register used in RTL. | |
1743 | This is used with -noreg to mark the extent of the lifespan | |
1744 | of any registers used in a user-visible variable's DECL_RTL. */ | |
1745 | ||
1746 | void | |
1747 | use_variable (rtl) | |
1748 | rtx rtl; | |
1749 | { | |
1750 | if (GET_CODE (rtl) == REG) | |
1751 | /* This is a register variable. */ | |
1752 | emit_insn (gen_rtx (USE, VOIDmode, rtl)); | |
1753 | else if (GET_CODE (rtl) == MEM | |
1754 | && GET_CODE (XEXP (rtl, 0)) == REG | |
1755 | && XEXP (rtl, 0) != frame_pointer_rtx | |
1756 | && XEXP (rtl, 0) != arg_pointer_rtx) | |
1757 | /* This is a variable-sized structure. */ | |
1758 | emit_insn (gen_rtx (USE, VOIDmode, XEXP (rtl, 0))); | |
1759 | } | |
1760 | ||
1761 | /* Like use_variable except that it outputs the USEs after INSN | |
1762 | instead of at the end of the insn-chain. */ | |
1763 | ||
1764 | static void | |
1765 | use_variable_after (rtl, insn) | |
1766 | rtx rtl, insn; | |
1767 | { | |
1768 | if (GET_CODE (rtl) == REG) | |
1769 | /* This is a register variable. */ | |
1770 | emit_insn_after (gen_rtx (USE, VOIDmode, rtl), insn); | |
1771 | else if (GET_CODE (rtl) == MEM | |
1772 | && GET_CODE (XEXP (rtl, 0)) == REG | |
1773 | && XEXP (rtl, 0) != frame_pointer_rtx | |
1774 | && XEXP (rtl, 0) != arg_pointer_rtx) | |
1775 | /* This is a variable-sized structure. */ | |
1776 | emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (rtl, 0)), insn); | |
1777 | } | |
1778 | ||
1779 | /* Generate RTL code to terminate a binding contour. | |
1780 | VARS is the chain of VAR_DECL nodes | |
1781 | for the variables bound in this contour. | |
1782 | MARK_ENDS is nonzero if we should put a note at the beginning | |
1783 | and end of this binding contour. | |
1784 | ||
1785 | DONT_JUMP_IN is nonzero if it is not valid to jump into this contour. | |
1786 | (That is true automatically if the contour has a saved stack level.) */ | |
1787 | ||
1788 | void | |
1789 | expand_end_bindings (vars, mark_ends, dont_jump_in) | |
1790 | tree vars; | |
1791 | int mark_ends; | |
1792 | int dont_jump_in; | |
1793 | { | |
1794 | register struct nesting *thisblock = block_stack; | |
1795 | register tree decl; | |
1796 | ||
1797 | if (warn_unused) | |
1798 | for (decl = vars; decl; decl = TREE_CHAIN (decl)) | |
1799 | if (! TREE_USED (decl) && TREE_CODE (decl) == VAR_DECL) | |
1800 | warning_with_decl (decl, "unused variable `%s'"); | |
1801 | ||
1802 | /* Mark the beginning and end of the scope if requested. */ | |
1803 | ||
1804 | if (mark_ends) | |
1805 | emit_note (0, NOTE_INSN_BLOCK_END); | |
1806 | else | |
1807 | /* Get rid of the beginning-mark if we don't make an end-mark. */ | |
1808 | NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED; | |
1809 | ||
1810 | if (thisblock->exit_label) | |
1811 | { | |
1812 | do_pending_stack_adjust (); | |
1813 | emit_label (thisblock->exit_label); | |
1814 | } | |
1815 | ||
1816 | if (dont_jump_in | |
1817 | || thisblock->data.block.stack_level != 0 | |
1818 | || thisblock->data.block.cleanups != 0) | |
1819 | { | |
1820 | struct label_chain *chain; | |
1821 | ||
1822 | /* Any labels in this block are no longer valid to go to. | |
1823 | Mark them to cause an error message. */ | |
1824 | for (chain = thisblock->data.block.label_chain; chain; chain = chain->next) | |
1825 | { | |
1826 | TREE_PACKED (chain->label) = 1; | |
1827 | /* If any goto without a fixup came to this label, | |
1828 | that must be an error, because gotos without fixups | |
1829 | come from outside all saved stack-levels and all cleanups. */ | |
1830 | if (TREE_ADDRESSABLE (chain->label)) | |
1831 | error_with_decl (chain->label, | |
1832 | "label `%s' used before containing binding contour"); | |
1833 | } | |
1834 | } | |
1835 | ||
1836 | /* Restore stack level in effect before the block | |
1837 | (only if variable-size objects allocated). */ | |
1838 | ||
1839 | if (thisblock->data.block.stack_level != 0 | |
1840 | || thisblock->data.block.cleanups != 0) | |
1841 | { | |
1842 | /* Perform any cleanups associated with the block. */ | |
1843 | ||
1844 | expand_cleanups (thisblock->data.block.cleanups, 0); | |
1845 | ||
1846 | /* Restore the stack level. */ | |
1847 | ||
1848 | if (thisblock->data.block.stack_level != 0) | |
1849 | { | |
1850 | do_pending_stack_adjust (); | |
1851 | emit_move_insn (stack_pointer_rtx, | |
1852 | thisblock->data.block.stack_level); | |
1853 | } | |
1854 | ||
1855 | /* Any gotos out of this block must also do these things. | |
1856 | Also report any gotos with fixups that came to labels in this level. */ | |
1857 | fixup_gotos (thisblock, | |
1858 | thisblock->data.block.stack_level, | |
1859 | thisblock->data.block.cleanups, | |
1860 | thisblock->data.block.first_insn, | |
1861 | dont_jump_in); | |
1862 | } | |
1863 | ||
1864 | /* If doing stupid register allocation, make sure lives of all | |
1865 | register variables declared here extend through the end of the scope. */ | |
1866 | ||
1867 | if (obey_regdecls) | |
1868 | for (decl = vars; decl; decl = TREE_CHAIN (decl)) | |
1869 | { | |
1870 | rtx rtl = DECL_RTL (decl); | |
1871 | if (TREE_CODE (decl) == VAR_DECL && rtl != 0) | |
1872 | use_variable (rtl); | |
1873 | } | |
1874 | ||
1875 | /* Restore block_stack level for containing block. */ | |
1876 | ||
1877 | stack_block_stack = thisblock->data.block.innermost_stack_block; | |
1878 | POPSTACK (block_stack); | |
1879 | } | |
1880 | \f | |
1881 | /* Generate RTL for the automatic variable declaration DECL. | |
1882 | (Other kinds of declarations are simply ignored if seen here.) | |
1883 | CLEANUP is an expression to be executed at exit from this binding contour; | |
1884 | for example, in C++, it might call the destructor for this variable. | |
1885 | ||
1886 | If CLEANUP contains any SAVE_EXPRs, then you must preevaluate them | |
1887 | either before or after calling `expand_decl' but before compiling | |
1888 | any subsequent expressions. This is because CLEANUP may be expanded | |
1889 | more than once, on different branches of execution. | |
1890 | For the same reason, CLEANUP may not contain a CALL_EXPR | |
1891 | except as its topmost node--else `preexpand_calls' would get confused. | |
1892 | ||
1893 | If CLEANUP is nonzero and DECL is zero, we record a cleanup | |
1894 | that is not associated with any particular variable. | |
1895 | ||
1896 | There is no special support here for C++ constructors. | |
1897 | They should be handled by the proper code in DECL_INITIAL. */ | |
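| /* For example (illustrative; build_cleanup is a hypothetical | |
| front-end helper): a C++ front end expanding `T x;' where T | |
| has a destructor might call | |
| ||
| expand_decl (x_decl, build_cleanup (x_decl)); | |
| ||
| so that the destructor call is expanded when this binding | |
| contour is ended. */ | |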
1898 | ||
1899 | void | |
1900 | expand_decl (decl, cleanup) | |
1901 | register tree decl; | |
1902 | tree cleanup; | |
1903 | { | |
1904 | struct nesting *thisblock = block_stack; | |
1905 | tree type; | |
1906 | ||
1907 | /* Record the cleanup if there is one. */ | |
1908 | ||
1909 | if (cleanup != 0) | |
1910 | { | |
1911 | thisblock->data.block.cleanups | |
1912 | = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups); | |
1913 | /* If this block has a cleanup, it belongs in stack_block_stack. */ | |
1914 | stack_block_stack = thisblock; | |
1915 | } | |
1916 | ||
1917 | if (decl == NULL_TREE) | |
1918 | { | |
1919 | /* This was a cleanup with no variable. */ | |
1920 | if (cleanup == 0) | |
1921 | abort (); | |
1922 | return; | |
1923 | } | |
1924 | ||
1925 | type = TREE_TYPE (decl); | |
1926 | ||
1927 | /* Aside from that, only automatic variables need any expansion done. | |
1928 | Static and external variables, and external functions, | |
1929 | will be handled by `assemble_variable' (called from finish_decl). | |
1930 | TYPE_DECL and CONST_DECL require nothing. | |
1931 | PARM_DECLs are handled in `assign_parms'. */ | |
1932 | ||
1933 | if (TREE_CODE (decl) != VAR_DECL) | |
1934 | return; | |
1935 | if (TREE_STATIC (decl) || TREE_EXTERNAL (decl)) | |
1936 | return; | |
1937 | ||
1938 | /* Create the RTL representation for the variable. */ | |
1939 | ||
1940 | if (type == error_mark_node) | |
1941 | DECL_RTL (decl) = gen_rtx (MEM, BLKmode, const0_rtx); | |
1942 | else if (DECL_SIZE (decl) == 0) | |
1943 | /* Variable with incomplete type. */ | |
1944 | { | |
1945 | if (DECL_INITIAL (decl) == 0) | |
1946 | /* Error message was already done; now avoid a crash. */ | |
1947 | DECL_RTL (decl) = assign_stack_local (DECL_MODE (decl), 0); | |
1948 | else | |
1949 | /* An initializer is going to decide the size of this array. | |
1950 | Until we know the size, represent its address with a reg. */ | |
1951 | DECL_RTL (decl) = gen_rtx (MEM, BLKmode, gen_reg_rtx (Pmode)); | |
1952 | } | |
1953 | else if (DECL_MODE (decl) != BLKmode | |
1954 | /* If -ffloat-store, don't put explicit float vars | |
1955 | into regs. */ | |
1956 | && !(flag_float_store | |
1957 | && TREE_CODE (type) == REAL_TYPE) | |
1958 | && ! TREE_VOLATILE (decl) | |
1959 | && ! TREE_ADDRESSABLE (decl) | |
1960 | && (TREE_REGDECL (decl) || ! obey_regdecls)) | |
1961 | { | |
1962 | /* Automatic variable that can go in a register. */ | |
1963 | DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl)); | |
1964 | if (TREE_CODE (type) == POINTER_TYPE) | |
1965 | mark_reg_pointer (DECL_RTL (decl)); | |
1966 | REG_USERVAR_P (DECL_RTL (decl)) = 1; | |
1967 | } | |
1968 | else if (TREE_LITERAL (DECL_SIZE (decl))) | |
1969 | { | |
1970 | rtx oldaddr = 0; | |
1971 | rtx addr; | |
1972 | ||
1973 | /* If we previously made RTL for this decl, it must be an array | |
1974 | whose size was determined by the initializer. | |
1975 | The old address was a register; set that register now | |
1976 | to the proper address. */ | |
1977 | if (DECL_RTL (decl) != 0) | |
1978 | { | |
1979 | if (GET_CODE (DECL_RTL (decl)) != MEM | |
1980 | || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG) | |
1981 | abort (); | |
1982 | oldaddr = XEXP (DECL_RTL (decl), 0); | |
1983 | } | |
1984 | ||
1985 | /* Variable of fixed size that goes on the stack. */ | |
1986 | DECL_RTL (decl) | |
1987 | = assign_stack_local (DECL_MODE (decl), | |
1988 | (TREE_INT_CST_LOW (DECL_SIZE (decl)) | |
1989 | * DECL_SIZE_UNIT (decl) | |
1990 | + BITS_PER_UNIT - 1) | |
1991 | / BITS_PER_UNIT); | |
1992 | if (oldaddr) | |
1993 | { | |
1994 | addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr); | |
1995 | emit_move_insn (oldaddr, addr); | |
1996 | } | |
1997 | ||
1998 | /* If this is a memory ref that contains aggregate components, | |
1999 | mark it as such for cse and loop optimize. */ | |
2000 | MEM_IN_STRUCT_P (DECL_RTL (decl)) | |
2001 | = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE | |
2002 | || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE | |
2003 | || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE); | |
2004 | #if 0 | |
2005 | /* If this is in memory because of -ffloat-store, | |
2006 | set the volatile bit, to prevent optimizations from | |
2007 | undoing the effects. */ | |
2008 | if (flag_float_store && TREE_CODE (type) == REAL_TYPE) | |
2009 | MEM_VOLATILE_P (DECL_RTL (decl)) = 1; | |
2010 | #endif | |
2011 | } | |
2012 | else | |
2013 | /* Dynamic-size object: must push space on the stack. */ | |
2014 | { | |
2015 | rtx address, size; | |
2016 | ||
2017 | frame_pointer_needed = 1; | |
2018 | ||
2019 | /* Record the stack pointer on entry to block, if we have | |
2020 | not already done so. */ | |
2021 | if (thisblock->data.block.stack_level == 0) | |
2022 | { | |
2023 | do_pending_stack_adjust (); | |
2024 | thisblock->data.block.stack_level | |
2025 | = copy_to_reg (stack_pointer_rtx); | |
2026 | stack_block_stack = thisblock; | |
2027 | } | |
2028 | ||
2029 | /* Compute the variable's size, in bytes. */ | |
2030 | size = expand_expr (convert_units (DECL_SIZE (decl), | |
2031 | DECL_SIZE_UNIT (decl), | |
2032 | BITS_PER_UNIT), | |
2033 | 0, VOIDmode, 0); | |
2034 | ||
2035 | /* Round it up to this machine's required stack boundary. */ | |
2036 | #ifdef STACK_BOUNDARY | |
2037 | /* Avoid extra code if we can prove it's a multiple already. */ | |
2038 | if (DECL_SIZE_UNIT (decl) % STACK_BOUNDARY) | |
2039 | { | |
2040 | #ifdef STACK_POINTER_OFFSET | |
2041 | /* Avoid extra code if we can prove that adding STACK_POINTER_OFFSET | |
2042 | will not give this address invalid alignment. */ | |
2043 | if (DECL_ALIGN (decl) > ((STACK_POINTER_OFFSET * BITS_PER_UNIT) % STACK_BOUNDARY)) | |
2044 | size = plus_constant (size, | |
2045 | STACK_POINTER_OFFSET % (STACK_BOUNDARY / BITS_PER_UNIT)); | |
2046 | #endif | |
2047 | size = round_push (size); | |
2048 | } | |
2049 | #endif /* STACK_BOUNDARY */ | |
2050 | ||
2051 | /* Make space on the stack, and get an rtx for the address of it. */ | |
2052 | #ifdef STACK_GROWS_DOWNWARD | |
2053 | anti_adjust_stack (size); | |
2054 | #endif | |
2055 | address = copy_to_reg (stack_pointer_rtx); | |
2056 | #ifdef STACK_POINTER_OFFSET | |
2057 | { | |
2058 | /* If the contents of the stack pointer reg are offset from the | |
2059 | actual top-of-stack address, add the offset here. */ | |
2060 | rtx sp_offset = gen_rtx (CONST_INT, VOIDmode, STACK_POINTER_OFFSET); | |
2061 | #ifdef STACK_BOUNDARY | |
2062 | #ifdef STACK_GROWS_DOWNWARD | |
2063 | int direction = 1; | |
2064 | #else /* not STACK_GROWS_DOWNWARD */ | |
2065 | int direction = 0; | |
2066 | #endif /* not STACK_GROWS_DOWNWARD */ | |
2067 | if (DECL_ALIGN (decl) > ((STACK_POINTER_OFFSET * BITS_PER_UNIT) % STACK_BOUNDARY)) | |
2068 | sp_offset = plus_constant (sp_offset, | |
2069 | (STACK_POINTER_OFFSET | |
2070 | % (STACK_BOUNDARY / BITS_PER_UNIT) | |
2071 | * direction)); | |
2072 | #endif /* STACK_BOUNDARY */ | |
2073 | emit_insn (gen_add2_insn (address, sp_offset)); | |
2074 | } | |
2075 | #endif /* STACK_POINTER_OFFSET */ | |
2076 | #ifndef STACK_GROWS_DOWNWARD | |
2077 | anti_adjust_stack (size); | |
2078 | #endif | |
2079 | ||
2080 | /* Some systems require a particular insn to refer to the stack | |
2081 | to make the pages exist. */ | |
2082 | #ifdef HAVE_probe | |
2083 | if (HAVE_probe) | |
2084 | emit_insn (gen_probe ()); | |
2085 | #endif | |
2086 | ||
2087 | /* Reference the variable indirectly through that rtx. */ | |
2088 | DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address); | |
2089 | } | |
2090 | ||
2091 | if (TREE_VOLATILE (decl)) | |
2092 | MEM_VOLATILE_P (DECL_RTL (decl)) = 1; | |
2093 | if (TREE_READONLY (decl)) | |
2094 | RTX_UNCHANGING_P (DECL_RTL (decl)) = 1; | |
2095 | ||
2096 | /* If doing stupid register allocation, make sure life of any | |
2097 | register variable starts here, at the start of its scope. */ | |
2098 | ||
2099 | if (obey_regdecls) | |
2100 | use_variable (DECL_RTL (decl)); | |
2101 | } | |
2102 | \f | |
2103 | /* Emit code to perform the initialization of a declaration DECL. */ | |
2104 | ||
2105 | void | |
2106 | expand_decl_init (decl) | |
2107 | tree decl; | |
2108 | { | |
2109 | if (TREE_STATIC (decl)) | |
2110 | return; | |
2111 | ||
2112 | /* Compute and store the initial value now. */ | |
2113 | ||
2114 | if (DECL_INITIAL (decl) == error_mark_node) | |
2115 | { | |
2116 | enum tree_code code = TREE_CODE (TREE_TYPE (decl)); | |
2117 | if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE | |
2118 | || code == POINTER_TYPE) | |
2119 | expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node), | |
2120 | 0, 0); | |
2121 | emit_queue (); | |
2122 | } | |
2123 | else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST) | |
2124 | { | |
2125 | emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl)); | |
2126 | expand_assignment (decl, DECL_INITIAL (decl), 0, 0); | |
2127 | emit_queue (); | |
2128 | } | |
2129 | } | |
2130 | ||
2131 | /* DECL is an anonymous union. CLEANUP is a cleanup for DECL. | |
2132 | DECL_ELTS is the list of elements that belong to DECL's type. | |
2133 | In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */ | |
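| /* E.g. (illustrative) for the anonymous union member | |
| `union { int i; float f; };' both I and F get a DECL_RTL that | |
| aliases the union's storage: a SUBREG of it when the union | |
| lives in a register, or the union's own rtl otherwise. */ | |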
2134 | ||
2135 | void | |
2136 | expand_anon_union_decl (decl, cleanup, decl_elts) | |
2137 | tree decl, cleanup, decl_elts; | |
2138 | { | |
2139 | struct nesting *thisblock = block_stack; | |
2140 | rtx x; | |
2141 | ||
2142 | expand_decl (decl, cleanup); | |
2143 | x = DECL_RTL (decl); | |
2144 | ||
2145 | while (decl_elts) | |
2146 | { | |
2147 | tree decl_elt = TREE_VALUE (decl_elts); | |
2148 | tree cleanup_elt = TREE_PURPOSE (decl_elts); | |
2149 | ||
2150 | DECL_RTL (decl_elt) | |
2151 | = (GET_MODE (x) != BLKmode | |
2152 | /* ??? This is incorrect if X is a MEM. | |
2153 | (SUBREG (MEM)) is not allowed at rtl generation time. */ | |
2156 | ? gen_rtx (SUBREG, TYPE_MODE (TREE_TYPE (decl_elt)), x, 0) | |
2157 | : x); | |
2158 | ||
2159 | /* Record the cleanup if there is one. */ | |
2160 | ||
2161 | if (cleanup != 0) | |
2162 | thisblock->data.block.cleanups | |
2163 | = temp_tree_cons (decl_elt, cleanup_elt, | |
2164 | thisblock->data.block.cleanups); | |
2165 | ||
2166 | decl_elts = TREE_CHAIN (decl_elts); | |
2167 | } | |
2168 | } | |
2169 | \f | |
2170 | /* Expand a list of cleanups LIST. | |
2171 | Elements may be expressions or may be nested lists. | |
2172 | ||
2173 | If DONT_DO is nonnull, then any list-element | |
2174 | whose TREE_PURPOSE matches DONT_DO is omitted. | |
2175 | This is sometimes used to avoid a cleanup associated with | |
2176 | a value that is being returned out of the scope. */ | |
2177 | ||
2178 | static void | |
2179 | expand_cleanups (list, dont_do) | |
2180 | tree list; | |
2181 | tree dont_do; | |
2182 | { | |
2183 | tree tail; | |
2184 | for (tail = list; tail; tail = TREE_CHAIN (tail)) | |
2185 | if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do) | |
2186 | { | |
2187 | if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST) | |
2188 | expand_cleanups (TREE_VALUE (tail), dont_do); | |
2189 | else | |
2190 | expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0); | |
2191 | } | |
2192 | } | |
2193 | ||
2194 | /* Expand a list of cleanups for a goto fixup. | |
2195 | The expansion is put into the insn chain after the insn *BEFORE_JUMP | |
2196 | and *BEFORE_JUMP is set to the insn that now comes before the jump. */ | |
2197 | ||
2198 | static void | |
2199 | fixup_cleanups (list, before_jump) | |
2200 | tree list; | |
2201 | rtx *before_jump; | |
2202 | { | |
2203 | rtx beyond_jump = get_last_insn (); | |
2204 | rtx new_before_jump; | |
2205 | ||
2206 | expand_cleanups (list, 0); | |
2207 | new_before_jump = get_last_insn (); | |
2208 | ||
2209 | reorder_insns (NEXT_INSN (beyond_jump), new_before_jump, *before_jump); | |
2210 | *before_jump = new_before_jump; | |
2211 | } | |
2212 | ||
2213 | /* Move all cleanups from the current block_stack | |
2214 | to the containing block_stack, where they are assumed to | |
2215 | have been created. If anything can cause a temporary to | |
2216 | be created, but not expanded for more than one level of | |
2217 | block_stacks, then this code will have to change. */ | |
2218 | ||
2219 | void | |
2220 | move_cleanups_up () | |
2221 | { | |
2222 | struct nesting *block = block_stack; | |
2223 | struct nesting *outer = block->next; | |
2224 | ||
2225 | outer->data.block.cleanups | |
2226 | = chainon (block->data.block.cleanups, | |
2227 | outer->data.block.cleanups); | |
2228 | block->data.block.cleanups = 0; | |
2229 | } | |
2230 | ||
2231 | int | |
2232 | this_contour_has_cleanups_p () | |
2233 | { | |
2234 | return block_stack && block_stack->data.block.cleanups != 0; | |
2235 | } | |
2236 | \f | |
2237 | /* Enter a case (Pascal) or switch (C) statement. | |
2238 | Push a block onto case_stack and nesting_stack | |
2239 | to accumulate the case-labels that are seen | |
2240 | and to record the labels generated for the statement. | |
2241 | ||
2242 | EXIT_FLAG is nonzero if `exit_something' should exit this case stmt. | |
2243 | Otherwise, this construct is transparent for `exit_something'. | |
2244 | ||
2245 | EXPR is the index-expression to be dispatched on. | |
2246 | TYPE is its nominal type. We could simply convert EXPR to this type, | |
2247 | but instead we take short cuts. */ | |
2248 | ||
2249 | void | |
2250 | expand_start_case (exit_flag, expr, type) | |
2251 | int exit_flag; | |
2252 | tree expr; | |
2253 | tree type; | |
2254 | { | |
2255 | register struct nesting *thiscase | |
2256 | = (struct nesting *) xmalloc (sizeof (struct nesting)); | |
2257 | ||
2258 | /* Make an entry on case_stack for the case we are entering. */ | |
2259 | ||
2260 | thiscase->next = case_stack; | |
2261 | thiscase->all = nesting_stack; | |
2262 | thiscase->depth = ++nesting_depth; | |
2263 | thiscase->exit_label = exit_flag ? gen_label_rtx () : 0; | |
2264 | thiscase->data.case_stmt.case_list = 0; | |
2265 | thiscase->data.case_stmt.index_expr = expr; | |
2266 | thiscase->data.case_stmt.nominal_type = type; | |
2267 | thiscase->data.case_stmt.default_label = 0; | |
2268 | thiscase->data.case_stmt.num_ranges = 0; | |
2269 | case_stack = thiscase; | |
2270 | nesting_stack = thiscase; | |
2271 | ||
2272 | do_pending_stack_adjust (); | |
2273 | ||
2274 | /* Make sure case_stmt.start points to something that won't | |
2275 | need any transformation before expand_end_case. */ | |
2276 | emit_note (0, NOTE_INSN_DELETED); | |
2277 | ||
2278 | thiscase->data.case_stmt.start = get_last_insn (); | |
2279 | } | |
2280 | ||
2281 | /* Start a "dummy case statement" within which case labels are invalid | |
2282 | and are not connected to any larger real case statement. | |
2283 | This can be used if you don't want to let a case statement jump | |
2284 | into the middle of certain kinds of constructs. */ | |
2285 | ||
2286 | void | |
2287 | expand_start_case_dummy () | |
2288 | { | |
2289 | register struct nesting *thiscase | |
2290 | = (struct nesting *) xmalloc (sizeof (struct nesting)); | |
2291 | ||
2292 | /* Make an entry on case_stack for the dummy. */ | |
2293 | ||
2294 | thiscase->next = case_stack; | |
2295 | thiscase->all = nesting_stack; | |
2296 | thiscase->depth = ++nesting_depth; | |
2297 | thiscase->exit_label = 0; | |
2298 | thiscase->data.case_stmt.case_list = 0; | |
2299 | thiscase->data.case_stmt.start = 0; | |
2300 | thiscase->data.case_stmt.nominal_type = 0; | |
2301 | thiscase->data.case_stmt.default_label = 0; | |
2302 | thiscase->data.case_stmt.num_ranges = 0; | |
2303 | case_stack = thiscase; | |
2304 | nesting_stack = thiscase; | |
2305 | } | |
2306 | ||
2307 | /* End a dummy case statement. */ | |
2308 | ||
2309 | void | |
2310 | expand_end_case_dummy () | |
2311 | { | |
2312 | POPSTACK (case_stack); | |
2313 | } | |
2314 | \f | |
2315 | /* Accumulate one case or default label inside a case or switch statement. | |
2316 | VALUE is the value of the case (a null pointer, for a default label). | |
2317 | ||
2318 | If not currently inside a case or switch statement, return 1 and do | |
2319 | nothing. The caller will print a language-specific error message. | |
2320 | If VALUE is a duplicate or overlaps, return 2 and do nothing. | |
2321 | If VALUE is out of range, return 3 and do nothing. | |
2322 | Return 0 on success. | |
2323 | ||
2324 | Extended to handle range statements, should they ever | |
2325 | be adopted. */ | |
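| /* Illustrative use of the return-value convention; the messages | |
| are assumptions, not quotations of any front end: | |
| ||
| switch (pushcase (value, label)) | |
| { | |
| case 1: error ("case label not within a switch statement"); break; | |
| case 2: error ("duplicate case value"); break; | |
| case 3: error ("case value out of range"); break; | |
| } */ | |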
2326 | ||
2327 | int | |
2328 | pushcase (value, label) | |
2329 | register tree value; | |
2330 | register tree label; | |
2331 | { | |
2332 | register struct case_node **l; | |
2333 | register struct case_node *n; | |
2334 | tree index_type; | |
2335 | tree nominal_type; | |
2336 | ||
2337 | /* Fail if not inside a real case statement. */ | |
2338 | if (! (case_stack && case_stack->data.case_stmt.start)) | |
2339 | return 1; | |
2340 | ||
2341 | index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr); | |
2342 | nominal_type = case_stack->data.case_stmt.nominal_type; | |
2343 | ||
2344 | /* If the index is erroneous, avoid more problems: pretend to succeed. */ | |
2345 | if (index_type == error_mark_node) | |
2346 | return 0; | |
2347 | ||
2348 | /* Convert VALUE to the type in which the comparisons are nominally done. */ | |
2349 | if (value != 0) | |
2350 | value = convert (nominal_type, value); | |
2351 | ||
2352 | /* Fail if this value is out of range for the actual type of the index | |
2353 | (which may be narrower than NOMINAL_TYPE). */ | |
2354 | if (value != 0 && ! int_fits_type_p (value, index_type)) | |
2355 | return 3; | |
2356 | ||
2357 | /* Fail if this is a duplicate or overlaps another entry. */ | |
2358 | if (value == 0) | |
2359 | { | |
2360 | if (case_stack->data.case_stmt.default_label != 0) | |
2361 | return 2; | |
2362 | case_stack->data.case_stmt.default_label = label; | |
2363 | } | |
2364 | else | |
2365 | { | |
2366 | /* Find the elt in the chain before which to insert the new value, | |
2367 | to keep the chain sorted in increasing order. | |
2368 | But report an error if this element is a duplicate. */ | |
2369 | for (l = &case_stack->data.case_stmt.case_list; | |
2370 | /* Keep going past elements distinctly less than VALUE. */ | |
2371 | *l != 0 && tree_int_cst_lt ((*l)->high, value); | |
2372 | l = &(*l)->right) | |
2373 | ; | |
2374 | if (*l) | |
2375 | { | |
2376 | /* Element we will insert before must be distinctly greater; | |
2377 | overlap means error. */ | |
2378 | if (! tree_int_cst_lt (value, (*l)->low)) | |
2379 | return 2; | |
2380 | } | |
2381 | ||
2382 | /* Add this label to the chain, and succeed. | |
2383 | Copy VALUE so it is on temporary rather than momentary | |
2384 | obstack and will thus survive till the end of the case statement. */ | |
2385 | n = (struct case_node *) oballoc (sizeof (struct case_node)); | |
2386 | n->left = 0; | |
2387 | n->right = *l; | |
2388 | n->high = n->low = copy_node (value); | |
2389 | n->code_label = label; | |
2390 | n->test_label = 0; | |
2391 | *l = n; | |
2392 | } | |
2393 | ||
2394 | expand_label (label); | |
2395 | return 0; | |
2396 | } | |
2397 | ||
2398 | /* Like pushcase but this case applies to all values | |
2399 | between VALUE1 and VALUE2 (inclusive). | |
2400 | The return value is the same as that of pushcase | |
2401 | but there is one additional error code: | |
2402 | 4 means the specified range was empty. | |
2403 | ||
2404 | Note that this does not currently work, since expand_end_case | |
2405 | has yet to be extended to handle RANGE_EXPRs. */ | |
2406 | ||
2407 | int | |
2408 | pushcase_range (value1, value2, label) | |
2409 | register tree value1, value2; | |
2410 | register tree label; | |
2411 | { | |
2412 | register struct case_node **l; | |
2413 | register struct case_node *n; | |
2414 | tree index_type; | |
2415 | tree nominal_type; | |
2416 | ||
2417 | /* Fail if not inside a real case statement. */ | |
2418 | if (! (case_stack && case_stack->data.case_stmt.start)) | |
2419 | return 1; | |
2420 | ||
2421 | index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr); | |
2422 | nominal_type = case_stack->data.case_stmt.nominal_type; | |
2423 | ||
2424 | /* If the index is erroneous, avoid more problems: pretend to succeed. */ | |
2425 | if (index_type == error_mark_node) | |
2426 | return 0; | |
2427 | ||
2428 | /* Convert VALUEs to type in which the comparisons are nominally done. */ | |
2429 | if (value1 != 0) | |
2430 | value1 = convert (nominal_type, value1); | |
2431 | if (value2 != 0) | |
2432 | value2 = convert (nominal_type, value2); | |
2433 | ||
2434 | /* Fail if these values are out of range. */ | |
2435 | if (value1 != 0 && ! int_fits_type_p (value1, index_type)) | |
2436 | return 3; | |
2437 | ||
2438 | if (value2 != 0 && ! int_fits_type_p (value2, index_type)) | |
2439 | return 3; | |
2440 | ||
2441 | /* Fail if the range is empty. */ | |
2442 | if (tree_int_cst_lt (value2, value1)) | |
2443 | return 4; | |
2444 | ||
2445 | /* If the bounds are equal, turn this into the one-value case. */ | |
2446 | if (tree_int_cst_equal (value1, value2)) | |
2447 | return pushcase (value1, label); | |
2448 | ||
2449 | /* Find the elt in the chain before which to insert the new value, | |
2450 | to keep the chain sorted in increasing order. | |
2451 | But report an error if this element is a duplicate. */ | |
2452 | for (l = &case_stack->data.case_stmt.case_list; | |
2453 | /* Keep going past elements distinctly less than this range. */ | |
2454 | *l != 0 && tree_int_cst_lt ((*l)->high, value1); | |
2455 | l = &(*l)->right) | |
2456 | ; | |
2457 | if (*l) | |
2458 | { | |
2459 | /* Element we will insert before must be distinctly greater; | |
2460 | overlap means error. */ | |
2461 | if (! tree_int_cst_lt (value2, (*l)->low)) | |
2462 | return 2; | |
2463 | } | |
2464 | ||
2465 | /* Add this label to the chain, and succeed. | |
2466 | Copy VALUE1, VALUE2 so they are on temporary rather than momentary | |
2467 | obstack and will thus survive till the end of the case statement. */ | |
2468 | ||
2469 | n = (struct case_node *) oballoc (sizeof (struct case_node)); | |
2470 | n->left = 0; | |
2471 | n->right = *l; | |
2472 | n->low = copy_node (value1); | |
2473 | n->high = copy_node (value2); | |
2474 | n->code_label = label; | |
2475 | n->test_label = 0; | |
2476 | *l = n; | |
2477 | ||
2478 | expand_label (label); | |
2479 | ||
2480 | case_stack->data.case_stmt.num_ranges++; | |
2481 | ||
2482 | return 0; | |
2483 | } | |
2484 | \f | |
2485 | /* Check that all enumeration literals are covered by the case | |
2486 | expressions of a switch. Also, warn if there are any extra | |
2487 | switch cases that are *not* elements of the enumerated type. */ | |
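| /* E.g. (illustrative) given `enum e { A, B, C };', a switch on a | |
| value of type enum e whose cases mention only A and B draws | |
| the warning `enumerated value `C' not handled in switch'; a | |
| stray `case 42:' draws the second warning below. */ | |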
2488 | ||
2489 | static void | |
2490 | check_for_full_enumeration_handling (type) | |
2491 | tree type; | |
2492 | { | |
2493 | register struct case_node *n; | |
2494 | register tree chain; | |
2495 | ||
2496 | /* The time complexity of this loop is currently O(N * M), with | |
2497 | N being the number of enumerals in the enumerated type, and | |
2498 | M being the number of case expressions in the switch. */ | |
2499 | ||
2500 | for (chain = TYPE_VALUES (type); | |
2501 | chain; | |
2502 | chain = TREE_CHAIN (chain)) | |
2503 | { | |
2504 | /* Find a match between enumeral and case expression, if possible. | |
2505 | Quit looking when we've gone too far (since case expressions | |
2506 | are kept sorted in ascending order). Warn about enumerals not | |
2507 | handled in the switch statement case expression list. */ | |
2508 | ||
2509 | for (n = case_stack->data.case_stmt.case_list; | |
2510 | n && tree_int_cst_lt (n->high, TREE_VALUE (chain)); | |
2511 | n = n->right) | |
2512 | ; | |
2513 | ||
2514 | if (!(n && tree_int_cst_equal (n->low, TREE_VALUE (chain)))) | |
2515 | warning ("enumerated value `%s' not handled in switch", | |
2516 | IDENTIFIER_POINTER (TREE_PURPOSE (chain))); | |
2517 | } | |
2518 | ||
2519 | /* Now we go the other way around; we warn if there are case | |
2520 | expressions that don't correspond to enumerals. This can | |
2521 | occur since C and C++ don't enforce type-checking of | |
2522 | assignments to enumeration variables. */ | |
2523 | ||
2524 | for (n = case_stack->data.case_stmt.case_list; n; n = n->right) | |
2525 | { | |
2526 | for (chain = TYPE_VALUES (type); | |
2527 | chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain)); | |
2528 | chain = TREE_CHAIN (chain)) | |
2529 | ; | |
2530 | ||
2531 | if (!chain) | |
2532 | warning ("case value `%d' not in enumerated type `%s'", | |
2533 | TREE_INT_CST_LOW (n->low), | |
2534 | IDENTIFIER_POINTER (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE | |
2535 | ? TYPE_NAME (type) | |
2536 | : DECL_NAME (TYPE_NAME (type)))); | |
2537 | } | |
2538 | } | |
2539 | \f | |
2540 | /* Terminate a case (Pascal) or switch (C) statement | |
2541 | in which CASE_INDEX is the expression to be tested. | |
2542 | Generate the code to test it and jump to the right place. */ | |
2543 | ||
2544 | void | |
2545 | expand_end_case (orig_index) | |
2546 | tree orig_index; | |
2547 | { | |
2548 | tree minval, maxval, range; | |
2549 | rtx default_label = 0; | |
2550 | register struct case_node *n; | |
2551 | int count; | |
2552 | rtx index; | |
2553 | rtx table_label = gen_label_rtx (); | |
2554 | int ncases; | |
2555 | rtx *labelvec; | |
2556 | register int i; | |
2557 | rtx before_case; | |
2558 | register struct nesting *thiscase = case_stack; | |
2559 | tree index_expr = thiscase->data.case_stmt.index_expr; | |
2560 | int unsignedp = TREE_UNSIGNED (TREE_TYPE (index_expr)); | |
2561 | ||
2562 | do_pending_stack_adjust (); | |
2563 | ||
2564 | /* An ERROR_MARK occurs for various reasons including invalid data type. */ | |
2565 | if (TREE_TYPE (index_expr) != error_mark_node) | |
2566 | { | |
2567 | /* If switch expression was an enumerated type, check that all | |
2568 | enumeration literals are covered by the cases. | |
2569 | No sense trying this if there's a default case, however. */ | |
2570 | ||
2571 | if (!thiscase->data.case_stmt.default_label | |
2572 | && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE | |
2573 | && TREE_CODE (index_expr) != INTEGER_CST | |
2574 | && warn_switch) | |
2575 | check_for_full_enumeration_handling (TREE_TYPE (orig_index)); | |
2576 | ||
2577 | /* If we don't have a default-label, create one here, | |
2578 | after the body of the switch. */ | |
2579 | if (thiscase->data.case_stmt.default_label == 0) | |
2580 | { | |
2581 | thiscase->data.case_stmt.default_label | |
2582 | = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); | |
2583 | expand_label (thiscase->data.case_stmt.default_label); | |
2584 | } | |
2585 | default_label = label_rtx (thiscase->data.case_stmt.default_label); | |
2586 | ||
2587 | before_case = get_last_insn (); | |
2588 | ||
2589 | /* Simplify the case-list before we count it. */ | |
2590 | group_case_nodes (thiscase->data.case_stmt.case_list); | |
2591 | ||
2592 | /* Get upper and lower bounds of case values. | |
2593 | Also convert all the case values to the index expr's data type. */ | |
2594 | ||
2595 | count = 0; | |
2596 | for (n = thiscase->data.case_stmt.case_list; n; n = n->right) | |
2597 | { | |
2598 | /* Check low and high label values are integers. */ | |
2599 | if (TREE_CODE (n->low) != INTEGER_CST) | |
2600 | abort (); | |
2601 | if (TREE_CODE (n->high) != INTEGER_CST) | |
2602 | abort (); | |
2603 | ||
2604 | n->low = convert (TREE_TYPE (index_expr), n->low); | |
2605 | n->high = convert (TREE_TYPE (index_expr), n->high); | |
2606 | ||
2607 | /* Count the elements and track the largest and smallest | |
2608 | of them (treating them as signed even if they are not). */ | |
2609 | if (count++ == 0) | |
2610 | { | |
2611 | minval = n->low; | |
2612 | maxval = n->high; | |
2613 | } | |
2614 | else | |
2615 | { | |
2616 | if (INT_CST_LT (n->low, minval)) | |
2617 | minval = n->low; | |
2618 | if (INT_CST_LT (maxval, n->high)) | |
2619 | maxval = n->high; | |
2620 | } | |
2621 | /* A range counts double, since it requires two compares. */ | |
2622 | if (! tree_int_cst_equal (n->low, n->high)) | |
2623 | count++; | |
2624 | } | |
2625 | ||
2626 | /* Compute span of values. */ | |
2627 | if (count != 0) | |
2628 | range = combine (MINUS_EXPR, maxval, minval); | |
2629 | ||
2630 | if (count == 0 || TREE_CODE (TREE_TYPE (index_expr)) == ERROR_MARK) | |
2631 | { | |
2632 | expand_expr (index_expr, const0_rtx, VOIDmode, 0); | |
2633 | emit_queue (); | |
2634 | emit_jump (default_label); | |
2635 | } | |
2636 | /* If range of values is much bigger than number of values, | |
2637 | make a sequence of conditional branches instead of a dispatch. | |
2638 | If the switch-index is a constant, do it this way | |
2639 | because we can optimize it. */ | |
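| /* E.g. (illustrative) three cases 1, 5 and 1000000 span a range | |
| of 999999, far more than 10 * 3, so three compare-and-branch | |
| sequences beat a million-entry dispatch table. */ | |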
2640 | else if (TREE_INT_CST_HIGH (range) != 0 | |
2641 | #ifdef HAVE_casesi | |
2642 | || count < 4 | |
2643 | #else | |
2644 | /* If machine does not have a case insn that compares the | |
2645 | bounds, this means extra overhead for dispatch tables | |
2646 | which raises the threshold for using them. */ | |
2647 | || count < 5 | |
2648 | #endif | |
2649 | || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count | |
2650 | || TREE_CODE (index_expr) == INTEGER_CST) | |
2651 | { | |
2652 | index = expand_expr (index_expr, 0, VOIDmode, 0); | |
2653 | ||
2654 | /* If the index is a short or char for which we do not have | |
2655 | an insn to handle comparisons directly, convert it to | |
2656 | a full integer now, rather than letting each comparison | |
2657 | generate the conversion. */ | |
2658 | ||
2659 | if ((GET_MODE (index) == QImode || GET_MODE (index) == HImode) | |
2660 | && (cmp_optab->handlers[(int) GET_MODE(index)].insn_code | |
2661 | == CODE_FOR_nothing)) | |
2662 | index = convert_to_mode (SImode, index, unsignedp); | |
2663 | ||
2664 | emit_queue (); | |
2665 | do_pending_stack_adjust (); | |
2666 | ||
2667 | index = protect_from_queue (index, 0); | |
2668 | if (GET_CODE (index) == MEM) | |
2669 | index = copy_to_reg (index); | |
2670 | if (GET_CODE (index) == CONST_INT | |
2671 | || TREE_CODE (index_expr) == INTEGER_CST) | |
2672 | { | |
2673 | /* Make a tree node with the proper constant value | |
2674 | if we don't already have one. */ | |
2675 | if (TREE_CODE (index_expr) != INTEGER_CST) | |
2676 | { | |
2677 | index_expr | |
2678 | = build_int_2 (INTVAL (index), | |
2679 | !unsignedp && INTVAL (index) >= 0 ? 0 : -1); | |
2680 | index_expr = convert (TREE_TYPE (index_expr), index_expr); | |
2681 | } | |
2682 | ||
2683 | /* For constant index expressions we need only | |
2684 | issue an unconditional branch to the appropriate | |
2685 | target code. The job of removing any unreachable | |
2686 | code is left to the optimization phase if the | |
2687 | "-O" option is specified. */ | |
2688 | for (n = thiscase->data.case_stmt.case_list; | |
2689 | n; | |
2690 | n = n->right) | |
2691 | { | |
2692 | if (! tree_int_cst_lt (index_expr, n->low) | |
2693 | && ! tree_int_cst_lt (n->high, index_expr)) | |
2694 | break; | |
2695 | } | |
2696 | if (n) | |
2697 | emit_jump (label_rtx (n->code_label)); | |
2698 | else | |
2699 | emit_jump (default_label); | |
2700 | } | |
2701 | else | |
2702 | { | |
2703 | /* If the index expression is not constant we generate | |
2704 | a binary decision tree to select the appropriate | |
2705 | target code. This is done as follows: | |
2706 | ||
2707 | The list of cases is rearranged into a binary tree, | |
2708 | nearly optimal assuming equal probability for each case. | |
2709 | ||
2710 | The tree is transformed into RTL, eliminating | |
2711 | redundant test conditions at the same time. | |
2712 | ||
2713 | If program flow could reach the end of the | |
2714 | decision tree an unconditional jump to the | |
2715 | default code is emitted. */ | |
2716 | balance_case_nodes (&thiscase->data.case_stmt.case_list, 0); | |
2717 | emit_case_nodes (index, thiscase->data.case_stmt.case_list, | |
2718 | default_label, unsignedp); | |
2719 | emit_jump_if_reachable (default_label); | |
2720 | } | |
2721 | } | |
2722 | else | |
2723 | { | |
2724 | #ifdef HAVE_casesi | |
2725 | /* Convert the index to SImode. */ | |
2726 | if (TYPE_MODE (TREE_TYPE (index_expr)) == DImode) | |
2727 | { | |
2728 | index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr), | |
2729 | index_expr, minval); | |
2730 | minval = integer_zero_node; | |
2731 | } | |
2732 | if (TYPE_MODE (TREE_TYPE (index_expr)) != SImode) | |
2733 | index_expr = convert (type_for_size (GET_MODE_BITSIZE (SImode), 0), | |
2734 | index_expr); | |
2735 | index = expand_expr (index_expr, 0, VOIDmode, 0); | |
2736 | emit_queue (); | |
2737 | index = protect_from_queue (index, 0); | |
2738 | do_pending_stack_adjust (); | |
2739 | ||
2740 | emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0), | |
2741 | expand_expr (range, 0, VOIDmode, 0), | |
2742 | table_label, default_label)); | |
2743 | #else | |
2744 | #ifdef HAVE_tablejump | |
2745 | index_expr = convert (type_for_size (GET_MODE_BITSIZE (SImode), 0), | |
2746 | build (MINUS_EXPR, TREE_TYPE (index_expr), | |
2747 | index_expr, minval)); | |
2748 | index = expand_expr (index_expr, 0, VOIDmode, 0); | |
2749 | emit_queue (); | |
2750 | index = protect_from_queue (index, 0); | |
2751 | do_pending_stack_adjust (); | |
2752 | ||
2753 | do_tablejump (index, | |
2754 | gen_rtx (CONST_INT, VOIDmode, TREE_INT_CST_LOW (range)), | |
2755 | table_label, default_label); | |
2756 | #else | |
2757 | lossage; /* Deliberately invalid: fail to compile if the target has neither casesi nor tablejump. */ | |
2758 | #endif /* not HAVE_tablejump */ | |
2759 | #endif /* not HAVE_casesi */ | |
2760 | ||
2761 | /* Get table of labels to jump to, in order of case index. */ | |
2762 | ||
2763 | ncases = TREE_INT_CST_LOW (range) + 1; | |
2764 | labelvec = (rtx *) alloca (ncases * sizeof (rtx)); | |
2765 | bzero (labelvec, ncases * sizeof (rtx)); | |
2766 | ||
2767 | for (n = thiscase->data.case_stmt.case_list; n; n = n->right) | |
2768 | { | |
2769 | register int i | |
2770 | = TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval); | |
2771 | ||
2772 | while (i + TREE_INT_CST_LOW (minval) | |
2773 | <= TREE_INT_CST_LOW (n->high)) | |
2774 | labelvec[i++] | |
2775 | = gen_rtx (LABEL_REF, Pmode, label_rtx (n->code_label)); | |
2776 | } | |
2777 | ||
2778 | /* Fill in the gaps with the default. */ | |
2779 | for (i = 0; i < ncases; i++) | |
2780 | if (labelvec[i] == 0) | |
2781 | labelvec[i] = gen_rtx (LABEL_REF, Pmode, default_label); | |
2782 | ||
2783 | /* Output the table */ | |
2784 | emit_label (table_label); | |
2785 | ||
2786 | #ifdef CASE_VECTOR_PC_RELATIVE | |
2787 | emit_jump_insn (gen_rtx (ADDR_DIFF_VEC, CASE_VECTOR_MODE, | |
2788 | gen_rtx (LABEL_REF, Pmode, table_label), | |
2789 | gen_rtvec_v (ncases, labelvec))); | |
2790 | #else | |
2791 | emit_jump_insn (gen_rtx (ADDR_VEC, CASE_VECTOR_MODE, | |
2792 | gen_rtvec_v (ncases, labelvec))); | |
2793 | #endif | |
2794 | /* If the case insn drops through the table, | |
2795 | after the table we must jump to the default-label. | |
2796 | Otherwise record no drop-through after the table. */ | |
2797 | #ifdef CASE_DROPS_THROUGH | |
2798 | emit_jump (default_label); | |
2799 | #else | |
2800 | emit_barrier (); | |
2801 | #endif | |
2802 | } | |
2803 | ||
2804 | reorder_insns (NEXT_INSN (before_case), get_last_insn (), | |
2805 | thiscase->data.case_stmt.start); | |
2806 | } | |
2807 | if (thiscase->exit_label) | |
2808 | emit_label (thiscase->exit_label); | |
2809 | ||
2810 | POPSTACK (case_stack); | |
2811 | } | |
2812 | ||
2813 | /* Generate code to jump to LABEL if OP1 and OP2 are equal. */ | |
2814 | ||
2815 | static void | |
2816 | do_jump_if_equal (op1, op2, label, unsignedp) | |
2817 | rtx op1, op2, label; | |
2818 | int unsignedp; | |
2819 | { | |
2820 | if (GET_CODE (op1) == CONST_INT | |
2821 | && GET_CODE (op2) == CONST_INT) | |
2822 | { | |
2823 | if (INTVAL (op1) == INTVAL (op2)) | |
2824 | emit_jump (label); | |
2825 | } | |
2826 | else | |
2827 | { | |
2828 | emit_cmp_insn (op1, op2, 0, unsignedp, 0); | |
2829 | emit_jump_insn (gen_beq (label)); | |
2830 | } | |
2831 | } | |
2832 | \f | |
2833 | /* Scan an ordered list of case nodes | |
2834 | combining those with consecutive values or ranges. | |
2835 | ||
2836 | E.g. three separate entries 1: 2: 3: become one entry 1..3: */ | |
2837 | ||
2838 | static void | |
2839 | group_case_nodes (head) | |
2840 | case_node_ptr head; | |
2841 | { | |
2842 | case_node_ptr node = head; | |
2843 | ||
2844 | while (node) | |
2845 | { | |
2846 | rtx lb = next_real_insn (label_rtx (node->code_label)); | |
2847 | case_node_ptr np = node; | |
2848 | ||
2849 | /* Try to group the successors of NODE with NODE. */ | |
2850 | while (((np = np->right) != 0) | |
2851 | /* Do they jump to the same place? */ | |
2852 | && next_real_insn (label_rtx (np->code_label)) == lb | |
2853 | /* Are their ranges consecutive? */ | |
2854 | && tree_int_cst_equal (np->low, | |
2855 | combine (PLUS_EXPR, node->high, | |
2856 | build_int_2 (1, 0))) | |
2857 | /* An overflow is not consecutive. */ | |
2858 | && tree_int_cst_lt (node->high, | |
2859 | combine (PLUS_EXPR, node->high, | |
2860 | build_int_2 (1, 0)))) | |
2861 | { | |
2862 | node->high = np->high; | |
2863 | } | |
2864 | /* NP is the first node after NODE which can't be grouped with it. | |
2865 | Delete the nodes in between, and move on to that node. */ | |
2866 | node->right = np; | |
2867 | node = np; | |
2868 | } | |
2869 | } | |
2870 | ||
2871 | /* Take an ordered list of case nodes | |
2872 | and transform them into a near optimal binary tree, | |
2873 | on the assumption that any target code selection value is as | |
2874 | likely as any other. | |
2875 | ||
2876 | The transformation is performed by splitting the ordered | |
2877 | list into two equal sections plus a pivot. The parts are | |
2878 | then attached to the pivot as left and right branches. Each | |
2879 | branch is then transformed recursively. */ | |
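| /* E.g. (illustrative) the sorted chain 1 2 3 4 5 6 7 becomes a | |
| tree with 4 at the root, 2 and 6 below it, and 1, 3, 5, 7 as | |
| leaves, so any value is selected in at most three comparisons. */ | |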
2880 | ||
2881 | static void | |
2882 | balance_case_nodes (head, parent) | |
2883 | case_node_ptr *head; | |
2884 | case_node_ptr parent; | |
2885 | { | |
2886 | register case_node_ptr np; | |
2887 | ||
2888 | np = *head; | |
2889 | if (np) | |
2890 | { | |
2891 | int i = 0; | |
2892 | int ranges = 0; | |
2893 | register case_node_ptr *npp; | |
2894 | case_node_ptr left; | |
2895 | ||
2896 | /* Count the number of entries on branch. | |
2897 | Also count the ranges. */ | |
2898 | while (np) | |
2899 | { | |
2900 | if (!tree_int_cst_equal (np->low, np->high)) | |
2901 | ranges++; | |
2902 | i++; | |
2903 | np = np->right; | |
2904 | } | |
2905 | if (i > 2) | |
2906 | { | |
2907 | /* Split this list if it is long enough for that to help. */ | |
2908 | npp = head; | |
2909 | left = *npp; | |
2910 | /* If there are just three nodes, split at the middle one. */ | |
2911 | if (i == 3) | |
2912 | npp = &(*npp)->right; | |
2913 | else | |
2914 | { | |
2915 | /* Find the place in the list that bisects the list's total cost, | |
2916 | where ranges count as 2. | |
2917 | Here I is set to half the total cost. */ | |
2918 | i = (i + ranges + 1) / 2; | |
2919 | while (1) | |
2920 | { | |
2921 | /* Skip nodes while their cost does not reach that amount. */ | |
2922 | if (!tree_int_cst_equal ((*npp)->low, (*npp)->high)) | |
2923 | i--; | |
2924 | i--; | |
2925 | if (i <= 0) | |
2926 | break; | |
2927 | npp = &(*npp)->right; | |
2928 | } | |
2929 | } | |
2930 | *head = np = *npp; | |
2931 | *npp = 0; | |
2932 | np->parent = parent; | |
2933 | np->left = left; | |
2934 | ||
2935 | /* Optimize each of the two split parts. */ | |
2936 | balance_case_nodes (&np->left, np); | |
2937 | balance_case_nodes (&np->right, np); | |
2938 | } | |
2939 | else | |
2940 | { | |
2941 | /* Else leave this branch as one level, | |
2942 | but fill in `parent' fields. */ | |
2943 | np = *head; | |
2944 | np->parent = parent; | |
2945 | for (; np->right; np = np->right) | |
2946 | np->right->parent = np; | |
2947 | } | |
2948 | } | |
2949 | } | |
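
/* An illustrative sketch (not part of the compiler): given the
   ordered list of single-valued case nodes

	1 -> 3 -> 5 -> 7 -> 9

   the bisection above picks 5 as the pivot, giving the subtree

		5
	       / \
	      1   7
	       \   \
		3   9

   and each half is then balanced recursively.  A range node such as
   10..20 counts as two units of cost when choosing the pivot.  */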
2950 | \f | |
2951 | /* Search the parent sections of the case node tree | |
2952 | to see if a test for the lower bound of NODE would be redundant. | |
2953 | ||
2954 | The instructions to synthesize the case decision tree are | |
2955 | output in the same order as nodes are processed, so it is | |
2956 | known that if a parent node tests a range whose high bound is | |
2957 | the current node's low bound minus one, then the current node | |
2958 | is bounded at its lower end. Thus the test would be redundant. */ | |
2959 | ||
2960 | static int | |
2961 | node_has_low_bound (node) | |
2962 | case_node_ptr node; | |
2963 | { | |
2964 | tree low_minus_one; | |
2965 | case_node_ptr pnode; | |
2966 | ||
2967 | if (node->left == 0) | |
2968 | { | |
2969 | low_minus_one = combine (MINUS_EXPR, node->low, build_int_2 (1, 0)); | |
2970 | /* Avoid the screw case of overflow where low_minus_one is > low. */ | |
2971 | if (tree_int_cst_lt (low_minus_one, node->low)) | |
2972 | for (pnode = node->parent; pnode; pnode = pnode->parent) | |
2973 | { | |
2974 | if (tree_int_cst_equal (low_minus_one, pnode->high)) | |
2975 | return 1; | |
2976 | /* If a parent node has a left branch we know that none | |
2977 | of its parents can have a high bound of our target | |
2978 | minus one so we abort the search. */ | |
2979 | if (pnode->left) | |
2980 | break; | |
2981 | } | |
2982 | } | |
2983 | return 0; | |
2984 | } | |
2985 | ||
2986 | /* Search the parent sections of the case node tree | |
2987 | to see if a test for the upper bound of NODE would be redundant. | |
2988 | ||
2989 | The instructions to synthesize the case decision tree are | |
2990 | output in the same order as nodes are processed, so it is | |
2991 | known that if a parent node tests a range whose low bound is | |
2992 | the current node's high bound plus one, then the current node | |
2993 | is bounded at its upper end. Thus the test would be redundant. */ | |
2994 | ||
2995 | static int | |
2996 | node_has_high_bound (node) | |
2997 | case_node_ptr node; | |
2998 | { | |
2999 | tree high_plus_one; | |
3000 | case_node_ptr pnode; | |
3001 | ||
3002 | if (node->right == 0) | |
3003 | { | |
3004 | high_plus_one = combine (PLUS_EXPR, node->high, build_int_2 (1, 0)); | |
3005 | /* Avoid the screw case of overflow where high_plus_one is > high. */ | |
3006 | if (tree_int_cst_lt (node->high, high_plus_one)) | |
3007 | for (pnode = node->parent; pnode; pnode = pnode->parent) | |
3008 | { | |
3009 | if (tree_int_cst_equal (high_plus_one, pnode->low)) | |
3010 | return 1; | |
3011 | /* If a parent node has a right branch we know that none | |
3012 | of its parents can have a low bound of our target | |
3013 | plus one so we abort the search. */ | |
3014 | if (pnode->right) | |
3015 | break; | |
3016 | } | |
3017 | } | |
3018 | return 0; | |
3019 | } | |
3020 | ||
3021 | /* Search the parent sections of the | |
3022 | case node tree to see if both tests for the upper and lower | |
3023 | bounds of NODE would be redundant. */ | |
3024 | ||
3025 | static int | |
3026 | node_is_bounded (node) | |
3027 | case_node_ptr node; | |
3028 | { | |
3029 | if (node->left || node->right) | |
3030 | return 0; | |
3031 | return node_has_low_bound (node) && node_has_high_bound (node); | |
3032 | } | |
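
/* A hypothetical example of the redundancy these predicates detect:
   if the tree contains a parent node for the range 1..4 whose right
   child is the single value 5, the code emitted for the parent has
   already branched away unless the index exceeded 4, so the child's
   lower-bound test (index >= 5) is redundant: node_has_low_bound
   sees that the parent's high bound (4) is the child's low bound
   minus one.  */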
3033 | ||
3034 | /* Emit an unconditional jump to LABEL unless it would be dead code. */ | |
3035 | ||
3036 | static void | |
3037 | emit_jump_if_reachable (label) | |
3038 | rtx label; | |
3039 | { | |
3042 | if (GET_CODE (get_last_insn ()) != BARRIER) | |
3043 | emit_jump (label); | |
3044 | } | |
3045 | \f | |
3046 | /* Emit step-by-step code to select a case for the value of INDEX. | |
3047 | The decision tree generated here follows the form of the | |
3048 | case-node binary tree NODE, whose nodes represent test conditions. | |
3049 | UNSIGNEDP is nonzero if we should do unsigned comparisons. | |
3050 | ||
3051 | Care is taken to prune redundant tests from the decision tree | |
3052 | by detecting any boundary conditions already checked by | |
3053 | emitted rtx. (See node_has_high_bound, node_has_low_bound | |
3054 | and node_is_bounded, above.) | |
3055 | ||
3056 | Where the test conditions can be shown to be redundant we emit | |
3057 | an unconditional jump to the target code. As a further | |
3058 | optimization, the subordinates of a tree node are examined to | |
3059 | check for bounded nodes. In this case conditional and/or | |
3060 | unconditional jumps as a result of the boundary check for the | |
3061 | current node are arranged to target the subordinate's associated | |
3062 | code for out-of-bound conditions on the current node. */ | |
3063 | ||
3064 | static void | |
3065 | emit_case_nodes (index, node, default_label, unsignedp) | |
3066 | rtx index; | |
3067 | case_node_ptr node; | |
3068 | rtx default_label; | |
3069 | int unsignedp; | |
3070 | { | |
3071 | /* If INDEX has an unsigned type, we must make unsigned branches. */ | |
3072 | typedef rtx rtx_function (); | |
3073 | rtx_function *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt; | |
3074 | rtx_function *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge; | |
3075 | rtx_function *gen_blt_pat = unsignedp ? gen_bltu : gen_blt; | |
3076 | rtx_function *gen_ble_pat = unsignedp ? gen_bleu : gen_ble; | |
3077 | ||
3078 | if (node->test_label) | |
3079 | { | |
3080 | /* If this test node requires a label it follows that | |
3081 | it must be preceded by an unconditional branch. | |
3082 | If control can pass to this point we can assume that | |
3083 | a "br default" is in order. */ | |
3084 | emit_jump_if_reachable (default_label); | |
3085 | expand_label (node->test_label); | |
3086 | } | |
3087 | if (tree_int_cst_equal (node->low, node->high)) | |
3088 | { | |
3089 | /* Node is single valued. */ | |
3090 | do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0), | |
3091 | label_rtx (node->code_label), unsignedp); | |
3092 | if (node->right) | |
3093 | { | |
3094 | if (node->left) | |
3095 | { | |
3096 | /* This node has children on either side. */ | |
3097 | emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3098 | ||
3099 | if (node_is_bounded (node->right)) | |
3100 | { | |
3101 | emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label))); | |
3102 | if (node_is_bounded (node->left)) | |
3103 | emit_jump (label_rtx (node->left->code_label)); | |
3104 | else | |
3105 | emit_case_nodes (index, node->left, | |
3106 | default_label, unsignedp); | |
3107 | } | |
3108 | else | |
3109 | { | |
3110 | if (node_is_bounded (node->left)) | |
3111 | emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label))); | |
3112 | else | |
3113 | { | |
3114 | node->right->test_label = | |
3115 | build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); | |
3116 | emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->test_label))); | |
3117 | emit_case_nodes (index, node->left, | |
3118 | default_label, unsignedp); | |
3119 | } | |
3120 | emit_case_nodes (index, node->right, | |
3121 | default_label, unsignedp); | |
3122 | } | |
3123 | } | |
3124 | else | |
3125 | { | |
3126 | /* Here we have a right child but no left | |
3127 | so we issue conditional branch to default | |
3128 | and process the right child. */ | |
3129 | ||
3130 | /* Omit the conditional branch to default | |
3131 | if doing so would avoid only one right child; | |
3132 | it costs too much space to save so little time. */ | |
3133 | if (node->right->right && !node_has_low_bound (node)) | |
3134 | { | |
3135 | emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3136 | emit_jump_insn ((*gen_blt_pat) (default_label)); | |
3137 | } | |
3138 | if (node_is_bounded (node->right)) | |
3139 | emit_jump (label_rtx (node->right->code_label)); | |
3140 | else | |
3141 | emit_case_nodes (index, node->right, default_label, unsignedp); | |
3142 | } | |
3143 | } | |
3144 | else if (node->left) | |
3145 | { | |
3146 | if (node_is_bounded (node->left)) | |
3147 | emit_jump (label_rtx (node->left->code_label)); | |
3148 | else | |
3149 | emit_case_nodes (index, node->left, default_label, unsignedp); | |
3150 | } | |
3151 | } | |
3152 | else | |
3153 | { | |
3154 | /* Node is a range. */ | |
3155 | if (node->right) | |
3156 | { | |
3157 | if (node->left) | |
3158 | { | |
3159 | emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3160 | if (node_is_bounded (node->right)) | |
3161 | { | |
3162 | /* Right hand node is fully bounded so we can | |
3163 | eliminate any testing and branch directly | |
3164 | to the target code. */ | |
3165 | emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label))); | |
3166 | } | |
3167 | else | |
3168 | { | |
3169 | /* Right hand node requires testing so create | |
3170 | a label to put on the cmp code. */ | |
3171 | node->right->test_label = | |
3172 | build_decl (LABEL_DECL, NULL_TREE, NULL_TREE); | |
3173 | emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->test_label))); | |
3174 | } | |
3175 | emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3176 | emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label))); | |
3177 | if (node_is_bounded (node->left)) | |
3178 | { | |
3179 | /* Left hand node is fully bounded so we can | |
3180 | eliminate any testing and branch directly | |
3181 | to the target code. */ | |
3182 | emit_jump (label_rtx (node->left->code_label)); | |
3183 | } | |
3184 | else | |
3185 | emit_case_nodes (index, node->left, default_label, unsignedp); | |
3186 | /* If right node has been given a test label above | |
3187 | we must process it now. */ | |
3188 | if (node->right->test_label) | |
3189 | emit_case_nodes (index, node->right, default_label, unsignedp); | |
3190 | } | |
3191 | else | |
3192 | { | |
3193 | if (!node_has_low_bound (node)) | |
3194 | { | |
3195 | emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3196 | emit_jump_insn ((*gen_blt_pat) (default_label)); | |
3197 | } | |
3198 | emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3199 | emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label))); | |
3200 | if (node_is_bounded (node->right)) | |
3201 | { | |
3202 | /* Right hand node is fully bounded so we can | |
3203 | eliminate any testing and branch directly | |
3204 | to the target code. */ | |
3205 | emit_jump (label_rtx (node->right->code_label)); | |
3206 | } | |
3207 | else | |
3208 | emit_case_nodes (index, node->right, default_label, unsignedp); | |
3209 | } | |
3210 | } | |
3211 | else if (node->left) | |
3212 | { | |
3213 | if (!node_has_high_bound (node)) | |
3214 | { | |
3215 | emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3216 | emit_jump_insn ((*gen_bgt_pat) (default_label)); | |
3217 | } | |
3218 | emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3219 | emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label))); | |
3220 | if (node_is_bounded (node->left)) | |
3221 | { | |
3222 | /* Left hand node is fully bounded so we can | |
3223 | eliminate any testing and branch directly | |
3224 | to the target code. */ | |
3225 | emit_jump (label_rtx (node->left->code_label)); | |
3226 | } | |
3227 | else | |
3228 | emit_case_nodes (index, node->left, default_label, unsignedp); | |
3229 | } | |
3230 | else | |
3231 | { | |
3232 | /* Node has no children so we check low and | |
3233 | high bounds to remove redundant tests. In practice | |
3234 | only one of the limits may be bounded or the parent | |
3235 | node will have emitted a jump to our target code. */ | |
3236 | if (!node_has_high_bound (node)) | |
3237 | { | |
3238 | emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3239 | emit_jump_insn ((*gen_bgt_pat) (default_label)); | |
3240 | } | |
3241 | if (!node_has_low_bound (node)) | |
3242 | { | |
3243 | emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0), 0, unsignedp, 0); | |
3244 | emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label))); | |
3245 | } | |
3246 | /* We allow the default case to drop through since | |
3247 | it will be picked up by calls to `emit_jump_if_reachable' | |
3248 | either on the next test label or at the end of | |
3249 | the decision tree emission. */ | |
3250 | } | |
3251 | } | |
3252 | } | |
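
/* A sketch of the emitted shape, assuming a hypothetical target and
   the switch `switch (i) { case 1: ...  case 2: ...  case 5: ... }'.
   After balancing (pivot 2, left child 1, right child 5) the code
   above emits roughly:

	cmp i,2; beq L2
	cmp i,2; bgt Ltest
	cmp i,1; beq L1
	br Ldefault
   Ltest:
	cmp i,5; beq L5

   with the final branch to the default label emitted by the caller
   after the whole tree.  */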
3253 | \f | |
3254 | /* Allocate fixed slots in the stack frame of the current function. */ | |
3255 | ||
3256 | /* Return size needed for stack frame based on slots so far allocated. */ | |
3257 | ||
3258 | int | |
3259 | get_frame_size () | |
3260 | { | |
3261 | #ifdef FRAME_GROWS_DOWNWARD | |
3262 | return -frame_offset + STARTING_FRAME_OFFSET; | |
3263 | #else | |
3264 | return frame_offset - STARTING_FRAME_OFFSET; | |
3265 | #endif | |
3266 | } | |
3267 | ||
3268 | /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it | |
3269 | with machine mode MODE. */ | |
3270 | ||
3271 | rtx | |
3272 | assign_stack_local (mode, size) | |
3273 | enum machine_mode mode; | |
3274 | int size; | |
3275 | { | |
3276 | register rtx x, addr; | |
3277 | int bigend_correction = 0; | |
3278 | ||
3279 | frame_pointer_needed = 1; | |
3280 | ||
3281 | /* Make each stack slot a multiple of the main allocation unit. */ | |
3282 | size = (((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1) | |
3283 | / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)) | |
3284 | * (BIGGEST_ALIGNMENT / BITS_PER_UNIT)); | |
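
/* E.g., if BIGGEST_ALIGNMENT / BITS_PER_UNIT is 8 on the target,
   a request for 10 bytes is rounded up here to 16.  */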
3285 | ||
3286 | /* On a big-endian machine, if we are allocating more space than we will use, | |
3287 | use the least significant bytes of those that are allocated. */ | |
3288 | #ifdef BYTES_BIG_ENDIAN | |
3289 | if (mode != BLKmode) | |
3290 | bigend_correction = size - GET_MODE_SIZE (mode); | |
3291 | #endif | |
3292 | ||
3293 | #ifdef FRAME_GROWS_DOWNWARD | |
3294 | frame_offset -= size; | |
3295 | #endif | |
3296 | addr = gen_rtx (PLUS, Pmode, frame_pointer_rtx, | |
3297 | gen_rtx (CONST_INT, VOIDmode, | |
3298 | (frame_offset + bigend_correction))); | |
3299 | #ifndef FRAME_GROWS_DOWNWARD | |
3300 | frame_offset += size; | |
3301 | #endif | |
3302 | ||
3303 | if (! memory_address_p (mode, addr)) | |
3304 | invalid_stack_slot = 1; | |
3305 | ||
3306 | x = gen_rtx (MEM, mode, addr); | |
3307 | ||
3308 | stack_slot_list = gen_rtx (EXPR_LIST, VOIDmode, x, stack_slot_list); | |
3309 | ||
3310 | return x; | |
3311 | } | |
3312 | ||
3313 | /* Retroactively move an auto variable from a register to a stack slot. | |
3314 | This is done when an address-reference to the variable is seen. */ | |
3315 | ||
3316 | void | |
3317 | put_var_into_stack (decl) | |
3318 | tree decl; | |
3319 | { | |
3320 | register rtx reg = DECL_RTL (decl); | |
3321 | register rtx new; | |
3322 | ||
3323 | /* No need to do anything if decl has no rtx yet | |
3324 | since in that case caller is setting TREE_ADDRESSABLE | |
3325 | and a stack slot will be assigned when the rtl is made. */ | |
3326 | if (reg == 0) | |
3327 | return; | |
3328 | if (GET_CODE (reg) != REG) | |
3329 | return; | |
3330 | ||
3331 | new = parm_stack_loc (reg); | |
3332 | if (new == 0) | |
3333 | new = assign_stack_local (GET_MODE (reg), GET_MODE_SIZE (GET_MODE (reg))); | |
3334 | ||
3335 | XEXP (reg, 0) = XEXP (new, 0); | |
3336 | /* `volatil' bit means one thing for MEMs, another entirely for REGs. */ | |
3337 | REG_USERVAR_P (reg) = 0; | |
3338 | PUT_CODE (reg, MEM); | |
3339 | ||
3340 | /* If this is a memory ref that contains aggregate components, | |
3341 | mark it as such for cse and loop optimize. */ | |
3342 | MEM_IN_STRUCT_P (reg) | |
3343 | = (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE | |
3344 | || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE | |
3345 | || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE); | |
3346 | ||
3347 | fixup_var_refs (reg); | |
3348 | } | |
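
/* A source-level illustration (hypothetical): given

	int x;
	int *p = &x;

   x may first have been assigned a pseudo-register; on seeing `&x'
   the front end calls put_var_into_stack, which rewrites x's rtl in
   place as a MEM and then repairs every insn already emitted.  */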
3349 | \f | |
3350 | static void | |
3351 | fixup_var_refs (var) | |
3352 | rtx var; | |
3353 | { | |
3354 | extern rtx sequence_stack; | |
3355 | rtx stack = sequence_stack; | |
3356 | tree pending; | |
3359 | ||
3360 | /* Must scan all insns for stack-refs that exceed the limit. */ | |
3361 | fixup_var_refs_insns (var, get_insns (), stack == 0); | |
3362 | ||
3363 | /* Scan all pending sequences too. */ | |
3364 | for (; stack; stack = XEXP (XEXP (stack, 1), 1)) | |
3365 | { | |
3366 | push_to_sequence (XEXP (stack, 0)); | |
3367 | fixup_var_refs_insns (var, XEXP (stack, 0), | |
3368 | XEXP (XEXP (stack, 1), 1) == 0); | |
3369 | /* Update remembered end of sequence | |
3370 | in case we added an insn at the end. */ | |
3371 | XEXP (XEXP (stack, 1), 0) = get_last_insn (); | |
3372 | end_sequence (); | |
3373 | } | |
3374 | ||
3375 | /* Scan all waiting RTL_EXPRs too. */ | |
3376 | for (pending = rtl_expr_chain; pending; pending = TREE_CHAIN (pending)) | |
3377 | { | |
3378 | rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending)); | |
3379 | if (seq != const0_rtx && seq != 0) | |
3380 | { | |
3381 | push_to_sequence (seq); | |
3382 | fixup_var_refs_insns (var, seq, 0); | |
3383 | end_sequence (); | |
3384 | } | |
3385 | } | |
3386 | } | |
3387 | ||
3388 | /* Scan the insn-chain starting with INSN for refs to VAR | |
3389 | and fix them up. TOPLEVEL is nonzero if this chain is the | |
3390 | main chain of insns for the current function. */ | |
3391 | ||
3392 | static void | |
3393 | fixup_var_refs_insns (var, insn, toplevel) | |
3394 | rtx var; | |
3395 | rtx insn; | |
3396 | int toplevel; | |
3397 | { | |
3398 | while (insn) | |
3399 | { | |
3400 | rtx next = NEXT_INSN (insn); | |
3401 | rtx note; | |
3402 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN | |
3403 | || GET_CODE (insn) == JUMP_INSN) | |
3404 | { | |
3405 | /* The insn to load VAR from a home in the arglist | |
3406 | is now a no-op. When we see it, just delete it. */ | |
3407 | if (toplevel | |
3408 | && GET_CODE (PATTERN (insn)) == SET | |
3409 | && SET_DEST (PATTERN (insn)) == var | |
3410 | && rtx_equal_p (SET_SRC (PATTERN (insn)), var)) | |
3411 | { | |
3412 | next = delete_insn (insn); | |
3413 | if (insn == last_parm_insn) | |
3414 | last_parm_insn = PREV_INSN (next); | |
3415 | } | |
3416 | else | |
3417 | fixup_var_refs_1 (var, PATTERN (insn), insn); | |
3418 | /* Also fix up any invalid exprs in the REG_NOTES of this insn. | |
3419 | But don't touch other insns referred to by reg-notes; | |
3420 | we will get them elsewhere. */ | |
3421 | for (note = REG_NOTES (insn); note; note = XEXP (note, 1)) | |
3422 | if (GET_CODE (note) != INSN_LIST) | |
3423 | XEXP (note, 0) = walk_fixup_memory_subreg (XEXP (note, 0), insn); | |
3424 | } | |
3425 | insn = next; | |
3426 | } | |
3427 | } | |
3428 | \f | |
3429 | static rtx | |
3430 | fixup_var_refs_1 (var, x, insn) | |
3431 | register rtx var; | |
3432 | register rtx x; | |
3433 | rtx insn; | |
3434 | { | |
3435 | register int i; | |
3436 | RTX_CODE code = GET_CODE (x); | |
3437 | register char *fmt; | |
3438 | register rtx tem; | |
3439 | ||
3440 | switch (code) | |
3441 | { | |
3442 | case MEM: | |
3443 | if (var == x) | |
3444 | { | |
3445 | x = fixup_stack_1 (x, insn); | |
3446 | tem = gen_reg_rtx (GET_MODE (x)); | |
3447 | /* Put new insn before a CALL, before any USEs before it. */ | |
3448 | if (GET_CODE (insn) == CALL_INSN) | |
3449 | while (PREV_INSN (insn) != 0 && GET_CODE (PREV_INSN (insn)) == INSN | |
3450 | && GET_CODE (PATTERN (PREV_INSN (insn))) == USE) | |
3451 | insn = PREV_INSN (insn); | |
3452 | emit_insn_before (gen_move_insn (tem, x), insn); | |
3453 | return tem; | |
3454 | } | |
3455 | break; | |
3456 | ||
3457 | case REG: | |
3458 | case CC0: | |
3459 | case PC: | |
3460 | case CONST_INT: | |
3461 | case CONST: | |
3462 | case SYMBOL_REF: | |
3463 | case LABEL_REF: | |
3464 | case CONST_DOUBLE: | |
3465 | return x; | |
3466 | ||
3467 | case SIGN_EXTRACT: | |
3468 | case ZERO_EXTRACT: | |
3469 | /* Note that in some cases those types of expressions are altered | |
3470 | by optimize_bit_field, and do not survive to get here. */ | |
3471 | case SUBREG: | |
3472 | tem = x; | |
3473 | while (GET_CODE (tem) == SUBREG || GET_CODE (tem) == SIGN_EXTRACT | |
3474 | || GET_CODE (tem) == ZERO_EXTRACT) | |
3475 | tem = XEXP (tem, 0); | |
3476 | if (tem == var) | |
3477 | { | |
3478 | x = fixup_stack_1 (x, insn); | |
3479 | tem = gen_reg_rtx (GET_MODE (x)); | |
3480 | if (GET_CODE (x) == SUBREG) | |
3481 | x = fixup_memory_subreg (x, insn); | |
3482 | emit_insn_before (gen_move_insn (tem, x), insn); | |
3483 | return tem; | |
3484 | } | |
3485 | break; | |
3486 | ||
3487 | case SET: | |
3488 | /* First do special simplification of bit-field references. */ | |
3489 | if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT | |
3490 | || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT) | |
3491 | optimize_bit_field (x, insn, 0); | |
3492 | if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT | |
3493 | || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT) | |
3494 | optimize_bit_field (x, insn, 0); | |
3495 | ||
3496 | { | |
3497 | rtx dest = SET_DEST (x); | |
3498 | rtx src = SET_SRC (x); | |
3499 | rtx outerdest = dest; | |
3500 | rtx outersrc = src; | |
3501 | ||
3502 | while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART | |
3503 | || GET_CODE (dest) == SIGN_EXTRACT | |
3504 | || GET_CODE (dest) == ZERO_EXTRACT) | |
3505 | dest = XEXP (dest, 0); | |
3506 | while (GET_CODE (src) == SUBREG | |
3507 | || GET_CODE (src) == SIGN_EXTRACT | |
3508 | || GET_CODE (src) == ZERO_EXTRACT) | |
3509 | src = XEXP (src, 0); | |
3510 | ||
3511 | /* If VAR does not appear at the top level of the SET | |
3512 | just scan the lower levels of the tree. */ | |
3513 | ||
3514 | if (src != var && dest != var) | |
3515 | break; | |
3516 | ||
3517 | /* Clean up (SUBREG:SI (MEM:mode ...) 0) | |
3518 | that may appear inside a SIGN_EXTRACT or ZERO_EXTRACT. | |
3519 | This was legitimate when the MEM was a REG. */ | |
3520 | ||
3521 | if ((GET_CODE (outerdest) == SIGN_EXTRACT | |
3522 | || GET_CODE (outerdest) == ZERO_EXTRACT) | |
3523 | && GET_CODE (XEXP (outerdest, 0)) == SUBREG | |
3524 | && SUBREG_REG (XEXP (outerdest, 0)) == var) | |
3525 | XEXP (outerdest, 0) = fixup_memory_subreg (XEXP (outerdest, 0), insn); | |
3526 | ||
3527 | if ((GET_CODE (outersrc) == SIGN_EXTRACT | |
3528 | || GET_CODE (outersrc) == ZERO_EXTRACT) | |
3529 | && GET_CODE (XEXP (outersrc, 0)) == SUBREG | |
3530 | && SUBREG_REG (XEXP (outersrc, 0)) == var) | |
3531 | XEXP (outersrc, 0) = fixup_memory_subreg (XEXP (outersrc, 0), insn); | |
3532 | ||
3533 | /* Make sure that the machine's SIGN_EXTRACT and ZERO_EXTRACT insns | |
3534 | accept a memory operand. */ | |
3535 | #ifdef HAVE_extzv | |
3536 | if (GET_CODE (outersrc) == ZERO_EXTRACT | |
3537 | && ! ((*insn_operand_predicate[(int) CODE_FOR_extzv][0]) | |
3538 | (XEXP (outersrc, 0), VOIDmode))) | |
3539 | XEXP (outersrc, 0) = src | |
3540 | = fixup_var_refs_1 (var, XEXP (outersrc, 0), insn); | |
3541 | #endif | |
3542 | #ifdef HAVE_extv | |
3543 | if (GET_CODE (outersrc) == SIGN_EXTRACT | |
3544 | && ! ((*insn_operand_predicate[(int) CODE_FOR_extv][0]) | |
3545 | (XEXP (outersrc, 0), VOIDmode))) | |
3546 | XEXP (outersrc, 0) = src | |
3547 | = fixup_var_refs_1 (var, XEXP (outersrc, 0), insn); | |
3548 | #endif | |
3549 | #ifdef HAVE_insv | |
3550 | if (GET_CODE (outerdest) == ZERO_EXTRACT | |
3551 | && ! ((*insn_operand_predicate[(int) CODE_FOR_insv][0]) | |
3552 | (XEXP (outerdest, 0), VOIDmode))) | |
3553 | { | |
3554 | rtx tem = gen_reg_rtx (GET_MODE (XEXP (outerdest, 0))); | |
3555 | ||
3556 | emit_insn_before (gen_move_insn (tem, XEXP (outerdest, 0)), insn); | |
3557 | emit_insn_after (gen_move_insn (XEXP (outerdest, 0), tem), insn); | |
3558 | dest = XEXP (outerdest, 0) = tem; | |
3559 | } | |
3560 | #endif | |
3561 | ||
3562 | /* Make sure a MEM inside a SIGN_EXTRACT has QImode | |
3563 | since that's what bit-field insns want. */ | |
3564 | ||
3565 | if ((GET_CODE (outerdest) == SIGN_EXTRACT | |
3566 | || GET_CODE (outerdest) == ZERO_EXTRACT) | |
3567 | && GET_CODE (XEXP (outerdest, 0)) == MEM | |
3568 | && GET_MODE (XEXP (outerdest, 0)) != QImode) | |
3569 | { | |
3570 | XEXP (outerdest, 0) = copy_rtx (XEXP (outerdest, 0)); | |
3571 | PUT_MODE (XEXP (outerdest, 0), QImode); | |
3572 | /* Adjust the address so the bit field starts within the byte | |
3573 | addressed. This helps certain optimization patterns. */ | |
3574 | if (GET_CODE (XEXP (outerdest, 2)) == CONST_INT | |
3575 | && offsettable_memref_p (XEXP (outerdest, 0))) | |
3576 | { | |
3577 | int count = INTVAL (XEXP (outerdest, 2)); | |
3578 | XEXP (outerdest, 0) | |
3579 | = adj_offsettable_operand (XEXP (outerdest, 0), | |
3580 | count / GET_MODE_BITSIZE (QImode)); | |
3581 | XEXP (outerdest, 2) | |
3582 | = gen_rtx (CONST_INT, VOIDmode, | |
3583 | count % GET_MODE_BITSIZE (QImode)); | |
3584 | } | |
3585 | } | |
3586 | ||
3587 | if ((GET_CODE (outersrc) == SIGN_EXTRACT | |
3588 | || GET_CODE (outersrc) == ZERO_EXTRACT) | |
3589 | && GET_CODE (XEXP (outersrc, 0)) == MEM | |
3590 | && GET_MODE (XEXP (outersrc, 0)) != QImode) | |
3591 | { | |
3592 | XEXP (outersrc, 0) = copy_rtx (XEXP (outersrc, 0)); | |
3593 | PUT_MODE (XEXP (outersrc, 0), QImode); | |
3594 | /* Adjust the address so the bit field starts within the byte | |
3595 | addressed. This helps certain optimization patterns. */ | |
3596 | if (GET_CODE (XEXP (outersrc, 2)) == CONST_INT | |
3597 | && offsettable_memref_p (XEXP (outersrc, 0))) | |
3598 | { | |
3599 | int count = INTVAL (XEXP (outersrc, 2)); | |
3600 | XEXP (outersrc, 0) | |
3601 | = adj_offsettable_operand (XEXP (outersrc, 0), | |
3602 | count / GET_MODE_BITSIZE (QImode)); | |
3603 | XEXP (outersrc, 2) | |
3604 | = gen_rtx (CONST_INT, VOIDmode, | |
3605 | count % GET_MODE_BITSIZE (QImode)); | |
3606 | } | |
3607 | } | |
3608 | ||
3609 | /* STRICT_LOW_PART is a no-op on memory references | |
3610 | and it can cause combinations to be unrecognizable, | |
3611 | so eliminate it. */ | |
3612 | ||
3613 | if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART) | |
3614 | SET_DEST (x) = XEXP (SET_DEST (x), 0); | |
3615 | ||
3616 | /* An insn to copy VAR into or out of a register | |
3617 | must be left alone, to avoid an infinite loop here. | |
3618 | But do fix up the address of VAR's stack slot if necessary, | |
3619 | and fix up SUBREGs containing VAR | |
3620 | (since they are now memory subregs). */ | |
3621 | ||
3622 | if (GET_CODE (SET_SRC (x)) == REG || GET_CODE (SET_DEST (x)) == REG | |
3623 | || (GET_CODE (SET_SRC (x)) == SUBREG | |
3624 | && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG) | |
3625 | || (GET_CODE (SET_DEST (x)) == SUBREG | |
3626 | && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)) | |
3627 | { | |
3628 | if (src == var && GET_CODE (SET_SRC (x)) == SUBREG) | |
3629 | SET_SRC (x) = fixup_memory_subreg (SET_SRC (x), insn); | |
3630 | if (dest == var && GET_CODE (SET_DEST (x)) == SUBREG) | |
3631 | SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn); | |
3632 | return fixup_stack_1 (x, insn); | |
3633 | } | |
3634 | ||
3635 | /* Otherwise, storing into VAR must be handled specially | |
3636 | by storing into a temporary and copying that into VAR | |
3637 | with a new insn after this one. */ | |
3638 | ||
3639 | if (dest == var) | |
3640 | { | |
3641 | rtx temp; | |
3642 | rtx fixeddest; | |
3643 | tem = SET_DEST (x); | |
3644 | /* STRICT_LOW_PART around a MEM can be discarded. */ | |
3645 | if (GET_CODE (tem) == STRICT_LOW_PART) | |
3646 | tem = XEXP (tem, 0); | |
3647 | /* Convert (SUBREG (MEM)) to a MEM in a changed mode. */ | |
3648 | if (GET_CODE (tem) == SUBREG) | |
3649 | tem = fixup_memory_subreg (tem, insn); | |
3650 | fixeddest = fixup_stack_1 (tem, insn); | |
3651 | temp = gen_reg_rtx (GET_MODE (tem)); | |
3652 | emit_insn_after (gen_move_insn (fixeddest, temp), insn); | |
3653 | SET_DEST (x) = temp; | |
3654 | } | |
3655 | } | |
3656 | } | |
3657 | ||
3658 | /* Nothing special about this RTX; fix its operands. */ | |
3659 | ||
3660 | fmt = GET_RTX_FORMAT (code); | |
3661 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3662 | { | |
3663 | if (fmt[i] == 'e') | |
3664 | XEXP (x, i) = fixup_var_refs_1 (var, XEXP (x, i), insn); | |
3665 | if (fmt[i] == 'E') | |
3666 | { | |
3667 | register int j; | |
3668 | for (j = 0; j < XVECLEN (x, i); j++) | |
3669 | XVECEXP (x, i, j) | |
3670 | = fixup_var_refs_1 (var, XVECEXP (x, i, j), insn); | |
3671 | } | |
3672 | } | |
3673 | return x; | |
3674 | } | |
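
/* A hypothetical illustration of the rewriting above: if VAR's rtl
   was changed from (reg:SI 45) to (mem:SI (plus FP -4)), then in

	(set (reg:SI 50) (plus:SI (mem:SI (plus FP -4)) (const_int 1)))

   the MEM case copies VAR into a fresh pseudo, emitting the move
   before the insn, and the pseudo replaces the MEM inside the
   arithmetic.  */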
3675 | \f | |
3676 | /* Given X, an rtx of the form (SUBREG:m1 (MEM:m2 addr)), | |
3677 | return an rtx (MEM:m1 newaddr) which is equivalent. | |
3678 | If any insns must be emitted to compute NEWADDR, put them before INSN. */ | |
3679 | ||
3680 | static rtx | |
3681 | fixup_memory_subreg (x, insn) | |
3682 | rtx x; | |
3683 | rtx insn; | |
3684 | { | |
3685 | int offset = SUBREG_WORD (x) * UNITS_PER_WORD; | |
3686 | rtx addr = XEXP (SUBREG_REG (x), 0); | |
3687 | enum machine_mode mode = GET_MODE (x); | |
3688 | rtx saved, result; | |
3689 | ||
3690 | #ifdef BYTES_BIG_ENDIAN | |
3691 | offset += (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
3692 | - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))); | |
3693 | #endif | |
3694 | addr = plus_constant (addr, offset); | |
3695 | if (memory_address_p (mode, addr)) | |
3696 | return change_address (SUBREG_REG (x), mode, addr); | |
3697 | saved = start_sequence (); | |
3698 | result = change_address (SUBREG_REG (x), mode, addr); | |
3699 | emit_insn_before (gen_sequence (), insn); | |
3700 | end_sequence (saved); | |
3701 | return result; | |
3702 | } | |
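
/* A worked example under assumed parameters: with UNITS_PER_WORD == 4,
   (subreg:SI (mem:DI (plus FP -8)) 1) has SUBREG_WORD 1, so OFFSET is
   4 and the result is (mem:SI (plus FP -4)).  On a big-endian machine
   the correction above turns (subreg:QI (mem:SI A) 0) into
   (mem:QI (plus A 3)), the low-order byte of the word.  */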
3703 | ||
3704 | /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X. | |
3705 | Replace subexpressions of X in place. | |
3706 | If X itself is a (SUBREG (MEM ...) ...), return the replacement expression. | |
3707 | Otherwise return X, with its contents possibly altered. | |
3708 | ||
3709 | If any insns must be emitted to compute NEWADDR, put them before INSN. */ | |
3710 | ||
3711 | static rtx | |
3712 | walk_fixup_memory_subreg (x, insn) | |
3713 | register rtx x; | |
3714 | rtx insn; | |
3715 | { | |
3716 | register enum rtx_code code; | |
3717 | register char *fmt; | |
3718 | register int i; | |
3719 | ||
3720 | if (x == 0) | |
3721 | return 0; | |
3722 | ||
3723 | code = GET_CODE (x); | |
3724 | ||
3725 | if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM) | |
3726 | return fixup_memory_subreg (x, insn); | |
3727 | ||
3728 | /* Nothing special about this RTX; fix its operands. */ | |
3729 | ||
3730 | fmt = GET_RTX_FORMAT (code); | |
3731 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3732 | { | |
3733 | if (fmt[i] == 'e') | |
3734 | XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn); | |
3735 | if (fmt[i] == 'E') | |
3736 | { | |
3737 | register int j; | |
3738 | for (j = 0; j < XVECLEN (x, i); j++) | |
3739 | XVECEXP (x, i, j) | |
3740 | = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn); | |
3741 | } | |
3742 | } | |
3743 | return x; | |
3744 | } | |
3745 | \f | |
3746 | #if 0 | |
3747 | /* Fix up any references to stack slots that are invalid memory addresses | |
3748 | because they exceed the maximum range of a displacement. */ | |
3749 | ||
3750 | void | |
3751 | fixup_stack_slots () | |
3752 | { | |
3753 | register rtx insn; | |
3754 | ||
3755 | /* Did we generate a stack slot that is out of range | |
3756 | or otherwise has an invalid address? */ | |
3757 | if (invalid_stack_slot) | |
3758 | { | |
3759 | /* Yes. Must scan all insns for stack-refs that exceed the limit. */ | |
3760 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
3761 | if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN | |
3762 | || GET_CODE (insn) == JUMP_INSN) | |
3763 | fixup_stack_1 (PATTERN (insn), insn); | |
3764 | } | |
3765 | } | |
3766 | #endif | |
3767 | ||
3768 | /* For each memory ref within X, if it refers to a stack slot | |
3769 | with an out of range displacement, put the address in a temp register | |
3770 | (emitting new insns before INSN to load these registers) | |
3771 | and alter the memory ref to use that register. | |
3772 | Replace each such MEM rtx with a copy, to avoid clobberage. */ | |
3773 | ||
3774 | static rtx | |
3775 | fixup_stack_1 (x, insn) | |
3776 | rtx x; | |
3777 | rtx insn; | |
3778 | { | |
3779 | register int i; | |
3780 | register RTX_CODE code = GET_CODE (x); | |
3781 | register char *fmt; | |
3782 | ||
3783 | if (code == MEM) | |
3784 | { | |
3785 | register rtx ad = XEXP (x, 0); | |
3786 | /* If we have address of a stack slot but it's not valid | |
3787 | (displacement is too large), compute the sum in a register. */ | |
3788 | if (GET_CODE (ad) == PLUS | |
3789 | && XEXP (ad, 0) == frame_pointer_rtx | |
3790 | && GET_CODE (XEXP (ad, 1)) == CONST_INT) | |
3791 | { | |
3792 | rtx temp; | |
3793 | if (memory_address_p (GET_MODE (x), ad)) | |
3794 | return x; | |
3795 | temp = gen_reg_rtx (GET_MODE (ad)); | |
3796 | emit_insn_before (gen_move_insn (temp, ad), insn); | |
3797 | return change_address (x, VOIDmode, temp); | |
3798 | } | |
3799 | return x; | |
3800 | } | |
3801 | ||
3802 | fmt = GET_RTX_FORMAT (code); | |
3803 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
3804 | { | |
3805 | if (fmt[i] == 'e') | |
3806 | XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn); | |
3807 | if (fmt[i] == 'E') | |
3808 | { | |
3809 | register int j; | |
3810 | for (j = 0; j < XVECLEN (x, i); j++) | |
3811 | XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn); | |
3812 | } | |
3813 | } | |
3814 | return x; | |
3815 | } | |
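
/* A hedged illustration: on a machine allowing only, say, a 12-bit
   displacement, a slot reference (mem:SI (plus FP 99999)) fails
   memory_address_p, so the code above loads (plus FP 99999) into a
   fresh pseudo with a move before INSN and rewrites the reference
   as a MEM of that register.  */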
3816 | \f | |
3817 | /* Optimization: a bit-field instruction whose field | |
3818 | happens to be a byte or halfword in memory | |
3819 | can be changed to a move instruction. | |
3820 | ||
3821 | We call here when INSN is an insn to examine or store into a bit-field. | |
3822 | BODY is the SET-rtx to be altered. | |
3823 | ||
3824 | EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0. | |
3825 | (Currently this is called only from stmt.c, and EQUIV_MEM is always 0.) */ | |
3826 | ||
3827 | static void | |
3828 | optimize_bit_field (body, insn, equiv_mem) | |
3829 | rtx body; | |
3830 | rtx insn; | |
3831 | rtx *equiv_mem; | |
3832 | { | |
3833 | register rtx bitfield; | |
3834 | int destflag; | |
3835 | ||
3836 | if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT | |
3837 | || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT) | |
3838 | bitfield = SET_DEST (body), destflag = 1; | |
3839 | else | |
3840 | bitfield = SET_SRC (body), destflag = 0; | |
3841 | ||
3842 | /* First check that the field being stored has constant size and position | |
3843 | and is in fact a byte or halfword suitably aligned. */ | |
3844 | ||
3845 | if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT | |
3846 | && GET_CODE (XEXP (bitfield, 2)) == CONST_INT | |
3847 | && (INTVAL (XEXP (bitfield, 1)) == GET_MODE_BITSIZE (QImode) | |
3848 | || INTVAL (XEXP (bitfield, 1)) == GET_MODE_BITSIZE (HImode)) | |
3849 | && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0) | |
3850 | { | |
3851 | register rtx memref = 0; | |
3852 | ||
3853 | /* Now check that the containing word is memory, not a register, | |
3854 | and that it is safe to change the machine mode and to | |
3855 | add something to the address. */ | |
3856 | ||
3857 | if (GET_CODE (XEXP (bitfield, 0)) == MEM) | |
3858 | memref = XEXP (bitfield, 0); | |
3859 | else if (GET_CODE (XEXP (bitfield, 0)) == REG | |
3860 | && equiv_mem != 0) | |
3861 | memref = equiv_mem[REGNO (XEXP (bitfield, 0))]; | |
3862 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
3863 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM) | |
3864 | memref = SUBREG_REG (XEXP (bitfield, 0)); | |
3865 | else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG | |
3866 | && equiv_mem != 0 | |
3867 | && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG) | |
3868 | memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))]; | |
3869 | ||
3870 | if (memref | |
3871 | && ! mode_dependent_address_p (XEXP (memref, 0)) | |
3872 | && offsettable_address_p (0, GET_MODE (bitfield), XEXP (memref, 0))) | |
3873 | { | |
3874 | /* Now adjust the address, first for any subreg'ing | |
3875 | that we are now getting rid of, | |
3876 | and then for which byte of the word is wanted. */ | |
3877 | ||
3878 | register int offset | |
3879 | = INTVAL (XEXP (bitfield, 2)) / GET_MODE_BITSIZE (QImode); | |
3880 | if (GET_CODE (XEXP (bitfield, 0)) == SUBREG) | |
3881 | { | |
3882 | offset += SUBREG_WORD (XEXP (bitfield, 0)) * UNITS_PER_WORD; | |
3883 | #ifdef BYTES_BIG_ENDIAN | |
3884 | offset -= (MIN (UNITS_PER_WORD, | |
3885 | GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0)))) | |
3886 | - MIN (UNITS_PER_WORD, | |
3887 | GET_MODE_SIZE (GET_MODE (memref)))); | |
3888 | #endif | |
3889 | } | |
3890 | ||
3891 | memref = gen_rtx (MEM, | |
3892 | (INTVAL (XEXP (bitfield, 1)) == GET_MODE_BITSIZE (QImode) | |
3893 | ? QImode : HImode), | |
3894 | XEXP (memref, 0)); | |
3895 | ||
3896 | /* Store this memory reference where | |
3897 | we found the bit field reference. */ | |
3898 | ||
3899 | if (destflag) | |
3900 | { | |
3901 | SET_DEST (body) | |
3902 | = adj_offsettable_operand (memref, offset); | |
3903 | if (! CONSTANT_ADDRESS_P (SET_SRC (body))) | |
3904 | { | |
3905 | rtx src = SET_SRC (body); | |
3906 | while (GET_CODE (src) == SUBREG | |
3907 | && SUBREG_WORD (src) == 0) | |
3908 | src = SUBREG_REG (src); | |
3909 | if (GET_MODE (src) != GET_MODE (memref)) | |
3910 | src = gen_lowpart (GET_MODE (memref), SET_SRC (body)); | |
3911 | SET_SRC (body) = src; | |
3912 | } | |
3913 | else if (GET_MODE (SET_SRC (body)) != VOIDmode | |
3914 | && GET_MODE (SET_SRC (body)) != GET_MODE (memref)) | |
3915 | /* This shouldn't happen because anything that didn't have | |
3916 | one of these modes should have got converted explicitly | |
3917 | and then referenced through a subreg. | |
3918 | This is so because the original bit-field was | |
3919 | handled by agg_mode and so its tree structure had | |
3920 | the same mode that memref now has. */ | |
3921 | abort (); | |
3922 | } | |
3923 | else | |
3924 | { | |
3925 | rtx dest = SET_DEST (body); | |
3926 | ||
3927 | while (GET_CODE (dest) == SUBREG | |
3928 | && SUBREG_WORD (dest) == 0) | |
3929 | dest = SUBREG_REG (dest); | |
3930 | SET_DEST (body) = dest; | |
3931 | ||
3932 | memref = adj_offsettable_operand (memref, offset); | |
3933 | if (GET_MODE (dest) == GET_MODE (memref)) | |
3934 | SET_SRC (body) = memref; | |
3935 | else | |
3936 | { | |
3937 | /* Convert the mem ref to the destination mode. */ | |
3938 | rtx last = get_last_insn (); | |
3939 | rtx newreg = gen_reg_rtx (GET_MODE (dest)); | |
3940 | convert_move (newreg, memref, | |
3941 | GET_CODE (SET_SRC (body)) == ZERO_EXTRACT); | |
3942 | /* Put the conversion before the insn being fixed. */ | |
3943 | reorder_insns (NEXT_INSN (last), get_last_insn (), | |
3944 | PREV_INSN (insn)); | |
3945 | SET_SRC (body) = newreg; | |
3946 | } | |
3947 | } | |
3948 | ||
3949 | /* Cause the insn to be re-recognized. */ | |
3950 | ||
3951 | INSN_CODE (insn) = -1; | |
3952 | } | |
3953 | } | |
3954 | } | |
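
/* A hypothetical before/after for the transformation above, with
   8-bit QImode:

	(set (zero_extract (mem:SI A) (const_int 8) (const_int 8))
	     (reg:QI 60))

   stores a byte-sized, byte-aligned field, so it becomes the move

	(set (mem:QI (plus A 1)) (reg:QI 60))

   and the insn is then re-recognized.  */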
3955 | \f | |
3956 | /* 1 + last pseudo register number used for loading a copy | |
3957 | of a parameter of this function. */ | |
3958 | ||
3959 | static int max_parm_reg; | |
3960 | ||
3961 | /* Vector indexed by REGNO, containing location on stack in which | |
3962 | to put the parm which is nominally in pseudo register REGNO, | |
3963 | if we discover that that parm must go in the stack. */ | |
3964 | static rtx *parm_reg_stack_loc; | |
3965 | ||
3966 | int | |
3967 | max_parm_reg_num () | |
3968 | { | |
3969 | return max_parm_reg; | |
3970 | } | |
3971 | ||
3972 | /* Return the first insn following those generated by `assign_parms'. */ | |
3973 | ||
3974 | rtx | |
3975 | get_first_nonparm_insn () | |
3976 | { | |
3977 | if (last_parm_insn) | |
3978 | return NEXT_INSN (last_parm_insn); | |
3979 | return get_insns (); | |
3980 | } | |
3981 | ||
3982 | /* Get the stack home of a REG rtx that is one of this function's parameters. | |
3983 | This is called rather than assigning a new stack slot as a local. | |
3984 | Return 0 if there is no existing stack home suitable for such use. */ | |
3985 | ||
3986 | static rtx | |
3987 | parm_stack_loc (reg) | |
3988 | rtx reg; | |
3989 | { | |
3990 | if (REGNO (reg) < max_parm_reg) | |
3991 | return parm_reg_stack_loc[REGNO (reg)]; | |
3992 | return 0; | |
3993 | } | |
3994 | ||
3995 | /* Return 1 if EXP returns an aggregate value, for which an address | |
3996 | must be passed to the function or returned by the function. */ | |
3997 | ||
3998 | int | |
3999 | aggregate_value_p (exp) | |
4000 | tree exp; | |
4001 | { | |
4002 | if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode) | |
4003 | return 1; | |
4004 | if (RETURN_IN_MEMORY (TREE_TYPE (exp))) | |
4005 | return 1; | |
4006 | if (flag_pcc_struct_return | |
4007 | && (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE | |
4008 | || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE)) | |
4009 | return 1; | |
4010 | return 0; | |
4011 | } | |
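
/* For example, a function returning a struct larger than any
   register has BLKmode and yields 1 here, so the caller must pass
   an address for the value; with -fpcc-struct-return even a
   one-word struct is handled this way.  */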
4012 | ||
4013 | /* Convert a mem ref into one with a valid memory address. | |
4014 | Pass through anything else unchanged. */ | |
4015 | ||
4016 | rtx | |
4017 | validize_mem (ref) | |
4018 | rtx ref; | |
4019 | { | |
4020 | if (GET_CODE (ref) != MEM) | |
4021 | return ref; | |
4022 | if (memory_address_p (GET_MODE (ref), XEXP (ref, 0))) | |
4023 | return ref; | |
4024 | /* Don't alter REF itself, since that is probably a stack slot. */ | |
4025 | return gen_rtx (MEM, GET_MODE (ref), | |
4026 | memory_address (GET_MODE (ref), XEXP (ref, 0))); | |
4027 | } | |
4028 | \f | |
4029 | /* Assign RTL expressions to the function's parameters. | |
4030 | This may involve copying them into registers and using | |
4031 | those registers as the RTL for them. */ | |
4032 | ||
4033 | static void | |
4034 | assign_parms (fndecl) | |
4035 | tree fndecl; | |
4036 | { | |
4037 | register tree parm; | |
4038 | register rtx entry_parm; | |
4039 | register rtx stack_parm; | |
4040 | register CUMULATIVE_ARGS args_so_far; | |
4041 | enum machine_mode passed_mode, nominal_mode; | |
4042 | /* Total space needed so far for args on the stack, | |
4043 | given as a constant and a tree-expression. */ | |
4044 | struct args_size stack_args_size; | |
4045 | int first_parm_offset = FIRST_PARM_OFFSET (fndecl); | |
4046 | int first_parm_caller_offset | |
4047 | #ifdef FIRST_PARM_CALLER_OFFSET | |
4048 | = FIRST_PARM_CALLER_OFFSET (fndecl); | |
4049 | #else | |
4050 | = first_parm_offset; | |
4051 | #endif | |
4052 | tree fntype = TREE_TYPE (fndecl); | |
4053 | /* This is used for the arg pointer when referring to stack args. */ | |
4054 | rtx internal_arg_pointer; | |
4055 | ||
4056 | int nparmregs | |
4057 | = list_length (DECL_ARGUMENTS (fndecl)) + FIRST_PSEUDO_REGISTER; | |
4058 | ||
4059 | /* Nonzero if function takes extra anonymous args. | |
4060 | This means the last named arg must be on the stack | |
4061 | right before the anonymous ones. | |
4062 | Also nonzero if the first arg is named `__builtin_va_alist', | |
4063 | which is used on some machines for old-fashioned non-ANSI varargs.h; | |
4064 | this too should be stuck onto the stack as if it had arrived there. */ | |
4065 | int vararg | |
4066 | = ((DECL_ARGUMENTS (fndecl) != 0 | |
4067 | && DECL_NAME (DECL_ARGUMENTS (fndecl)) | |
4068 | && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (DECL_ARGUMENTS (fndecl))), | |
4069 | "__builtin_va_alist"))) | |
4070 | || | |
4071 | (TYPE_ARG_TYPES (fntype) != 0 | |
4072 | && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype))) | |
4073 | != void_type_node))); | |
4074 | int arg_pointer_copied = 0; | |
4075 | ||
4076 | #if ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM | |
4077 | internal_arg_pointer = arg_pointer_rtx; | |
4078 | #else | |
4079 | /* If the arg pointer reg is not a fixed reg, | |
4080 | make a copy of it, and address parms via the copy. */ | |
4081 | if (fixed_regs[ARG_POINTER_REGNUM]) | |
4082 | internal_arg_pointer = arg_pointer_rtx; | |
4083 | else | |
4084 | { | |
4085 | internal_arg_pointer = copy_to_reg (arg_pointer_rtx); | |
4086 | arg_pointer_copied = 1; | |
4087 | } | |
4088 | #endif | |
4089 | ||
4090 | stack_args_size.constant = 0; | |
4091 | stack_args_size.var = 0; | |
4092 | ||
4093 | /* If struct value address comes on the stack, count it in size of args. */ | |
4094 | if (aggregate_value_p (DECL_RESULT (fndecl)) | |
4095 | && GET_CODE (struct_value_incoming_rtx) == MEM) | |
4096 | { | |
4097 | #ifdef FIRST_PARM_CALLER_OFFSET | |
4098 | /* Make the right thing happen on the sparc | |
4099 | in a function with a struct value and struct arg. */ | |
4100 | if (first_parm_caller_offset < 0) | |
4101 | first_parm_offset += GET_MODE_SIZE (Pmode); | |
4102 | else | |
4103 | #endif | |
4104 | stack_args_size.constant += GET_MODE_SIZE (Pmode); | |
4105 | } | |
4106 | ||
4107 | parm_reg_stack_loc = (rtx *) oballoc (nparmregs * sizeof (rtx)); | |
4108 | bzero (parm_reg_stack_loc, nparmregs * sizeof (rtx)); | |
4109 | ||
4110 | INIT_CUMULATIVE_ARGS (args_so_far, fntype); | |
4111 | ||
4112 | for (parm = DECL_ARGUMENTS (fndecl); parm; parm = TREE_CHAIN (parm)) | |
4113 | { | |
4114 | int aggregate | |
4115 | = (TREE_CODE (TREE_TYPE (parm)) == ARRAY_TYPE | |
4116 | || TREE_CODE (TREE_TYPE (parm)) == RECORD_TYPE | |
4117 | || TREE_CODE (TREE_TYPE (parm)) == UNION_TYPE); | |
4118 | struct args_size stack_offset; | |
4119 | rtx stack_offset_rtx; | |
4120 | enum direction where_pad; | |
4121 | ||
4122 | DECL_OFFSET (parm) = -1; | |
4123 | ||
4124 | if (TREE_TYPE (parm) == error_mark_node | |
4125 | /* This can happen after weird syntax errors | |
4126 | or if an enum type is defined among the parms. */ | |
4127 | || TREE_CODE (parm) != PARM_DECL | |
4128 | || DECL_ARG_TYPE (parm) == NULL) | |
4129 | { | |
4130 | DECL_RTL (parm) = gen_rtx (MEM, BLKmode, const0_rtx); | |
4131 | TREE_USED (parm) = 1; | |
4132 | continue; | |
4133 | } | |
4134 | ||
4135 | /* Find mode of arg as it is passed, and mode of arg | |
4136 | as it should be during execution of this function. */ | |
4137 | passed_mode = TYPE_MODE (DECL_ARG_TYPE (parm)); | |
4138 | nominal_mode = TYPE_MODE (TREE_TYPE (parm)); | |
4139 | ||
4140 | /* Get this parm's offset as an rtx. */ | |
4141 | stack_offset = stack_args_size; | |
4142 | stack_offset.constant += first_parm_offset; | |
4143 | ||
4144 | /* If this argument needs more than the usual parm alignment, do | |
4145 | extrinsic padding to reach that alignment. */ | |
4146 | ||
4147 | #ifdef MAX_PARM_BOUNDARY | |
4148 | /* If MAX_PARM_BOUNDARY is not defined, it means that the usual | |
4149 | alignment requirements are relaxed for parms, and that no parm | |
4150 | needs more alignment than PARM_BOUNDARY, regardless of data type. */ | |
4151 | ||
4152 | if (PARM_BOUNDARY < TYPE_ALIGN (DECL_ARG_TYPE (parm))) | |
4153 | { | |
4154 | int boundary = PARM_BOUNDARY; | |
4155 | ||
4156 | /* Determine the boundary to pad up to. */ | |
4157 | if (TYPE_ALIGN (DECL_ARG_TYPE (parm)) > boundary) | |
4158 | boundary = TYPE_ALIGN (DECL_ARG_TYPE (parm)); | |
4159 | if (boundary > MAX_PARM_BOUNDARY) | |
4160 | boundary = MAX_PARM_BOUNDARY; | |
4161 | ||
4162 | /* If the previous args don't reach such a boundary, | |
4163 | advance to the next one. */ | |
4164 | boundary /= BITS_PER_UNIT; | |
4165 | stack_offset.constant += boundary - 1; | |
4166 | stack_offset.constant &= ~(boundary - 1); | |
4167 | stack_args_size.constant += boundary - 1; | |
4168 | stack_args_size.constant &= ~(boundary - 1); | |
4169 | ||
4170 | if (stack_offset.var != 0) | |
4171 | abort (); /* This case not implemented yet */ | |
4172 | } | |
4173 | #endif /* MAX_PARM_BOUNDARY */ | |
4174 | ||
4175 | /* Find out if the parm needs intrinsic padding (up to PARM_BOUNDARY), | |
4176 | and whether above or below. */ | |
4177 | ||
4178 | where_pad | |
4179 | = FUNCTION_ARG_PADDING (passed_mode, | |
4180 | expand_expr (size_in_bytes (DECL_ARG_TYPE (parm)), | |
4181 | 0, VOIDmode, 0)); | |
4182 | ||
4183 | /* If arg should be padded below, adjust the stack address upward. | |
4184 | This padding is considered part of the space occupied by the | |
4185 | argument. It pads only up to PARM_BOUNDARY, and it does not | |
4186 | depend on the previous arguments, since they are assumed to | |
4187 | occupy a multiple of PARM_BOUNDARY. */ | |
4188 | ||
4189 | if (where_pad == downward) | |
4190 | { | |
4191 | if (passed_mode != BLKmode) | |
4192 | { | |
4193 | if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY) | |
4194 | stack_offset.constant | |
4195 | += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1) | |
4196 | / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT) | |
4197 | - GET_MODE_SIZE (passed_mode)); | |
4198 | } | |
4199 | else | |
4200 | { | |
4201 | tree sizetree = size_in_bytes (DECL_ARG_TYPE (parm)); | |
4202 | /* Round the size up to multiple of PARM_BOUNDARY bits. */ | |
4203 | tree s1 = convert_units (sizetree, BITS_PER_UNIT, PARM_BOUNDARY); | |
4204 | tree s2 = convert_units (s1, PARM_BOUNDARY, BITS_PER_UNIT); | |
4205 | /* Add it in. */ | |
4206 | ADD_PARM_SIZE (stack_offset, s2); | |
4207 | SUB_PARM_SIZE (stack_offset, sizetree); | |
4208 | } | |
4209 | } | |
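
/* A small worked instance of the adjustment above, under assumed
   parameters: with PARM_BOUNDARY == 32, a 2-byte HImode arg padded
   downward occupies a 4-byte slot with the value in the last 2
   bytes, so 2 is added to the offset.  */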
4210 | ||
4211 | /* Let machine desc say which reg (if any) the parm arrives in. | |
4212 | 0 means it arrives on the stack. */ | |
4213 | entry_parm = 0; | |
4214 | /* Variable-size args, and args following such, are never in regs. */ | |
4215 | if (TREE_CODE (TYPE_SIZE (TREE_TYPE (parm))) == INTEGER_CST | |
4216 | && stack_offset.var == 0) | |
4217 | { | |
4218 | /* Set LAST_NAMED if this is last named arg before some | |
4219 | anonymous args. We treat it as if it were anonymous too. */ | |
4220 | int last_named = (TREE_CHAIN (parm) == 0 && vararg); | |
4221 | #ifdef FUNCTION_INCOMING_ARG | |
4222 | entry_parm | |
4223 | = FUNCTION_INCOMING_ARG (args_so_far, passed_mode, | |
4224 | DECL_ARG_TYPE (parm), ! last_named); | |
4225 | #else | |
4226 | entry_parm | |
4227 | = FUNCTION_ARG (args_so_far, passed_mode, DECL_ARG_TYPE (parm), | |
4228 | ! last_named); | |
4229 | #endif | |
4230 | } | |
4231 | ||
4232 | #ifdef REG_PARM_STACK_SPACE | |
4233 | /* If we arrive at a stack parm while still counting space for reg parms, | |
4234 | skip up to the offset for the first stack parm. */ | |
4235 | if (entry_parm == 0 | |
4236 | && stack_args_size.constant + first_parm_caller_offset < 0) | |
4237 | { | |
4238 | int adjustment | |
4239 | = -(stack_args_size.constant + first_parm_caller_offset); | |
4240 | stack_args_size.constant += adjustment; | |
4241 | stack_offset.constant += adjustment; | |
4242 | } | |
4243 | #endif | |
4244 | ||
4245 | stack_offset_rtx = ARGS_SIZE_RTX (stack_offset); | |
4246 | ||
4247 | /* Determine parm's home in the stack, | |
4248 | in case it arrives in the stack or we should pretend it did. */ | |
4249 | /* Note that this is not necessarily a valid address. | |
4250 | We make it valid later when it is used. | |
4251 | It is necessary for the DECL_RTL to be an explicit stack slot, | |
4252 | but not necessary for it to be valid. */ | |
4253 | stack_parm | |
4254 | = gen_rtx (MEM, passed_mode, | |
4255 | gen_rtx (PLUS, Pmode, | |
4256 | internal_arg_pointer, | |
4257 | stack_offset_rtx)); | |
4258 | ||
4259 | /* If this is a memory ref that contains aggregate components, | |
4260 | mark it as such for cse and loop optimize. */ | |
4261 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
4262 | ||
4263 | /* If this parm was passed part in regs and part in memory, | |
4264 | pretend it arrived entirely in memory | |
4265 | by pushing the register-part onto the stack. | |
4266 | ||
4267 | In the special case of a DImode or DFmode that is split, | |
4268 | we could put it together in a pseudoreg directly, | |
4269 | but for now that's not worth bothering with. */ | |
4270 | ||
4271 | if (entry_parm) | |
4272 | { | |
4273 | int nregs = 0; | |
4274 | int i; | |
4275 | #ifdef FUNCTION_ARG_PARTIAL_NREGS | |
4276 | nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, passed_mode, | |
4277 | DECL_ARG_TYPE (parm), 1); | |
4278 | #endif | |
4279 | ||
4280 | #if 0 /* Replaced by new calling convention | |
4281 | which actually passes these args on the stack. */ | |
4282 | /* If this is the last named arg and anonymous args follow, | |
4283 | likewise pretend this arg arrived on the stack | |
4284 | so varargs can find the anonymous args following it. */ | |
4285 | if (TREE_CHAIN (parm) == 0 && vararg) | |
4286 | { | |
4287 | if (GET_MODE (entry_parm) == BLKmode) | |
4288 | nregs = GET_MODE_SIZE (GET_MODE (entry_parm)) / UNITS_PER_WORD; | |
4289 | else | |
4290 | nregs = (int_size_in_bytes (DECL_ARG_TYPE (parm)) | |
4291 | / UNITS_PER_WORD); | |
4292 | } | |
4293 | #endif /* 0 */ | |
4294 | ||
4295 | if (nregs > 0) | |
4296 | { | |
4297 | rtx valid_stack_parm = validize_mem (stack_parm); | |
4298 | current_function_pretend_args_size | |
4299 | = (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1) | |
4300 | / (PARM_BOUNDARY / BITS_PER_UNIT) | |
4301 | * (PARM_BOUNDARY / BITS_PER_UNIT)); | |
4302 | ||
4303 | i = nregs; | |
4304 | while (--i >= 0) | |
4305 | emit_move_insn (gen_rtx (MEM, SImode, | |
4306 | plus_constant (XEXP (valid_stack_parm, 0), | |
4307 | i * GET_MODE_SIZE (SImode))), | |
4308 | gen_rtx (REG, SImode, REGNO (entry_parm) + i)); | |
4309 | entry_parm = stack_parm; | |
4310 | } | |
4311 | } | |
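
/* A hedged sketch of the case above: if a 3-word struct arrives
   with its first two words in registers and the last word on the
   stack, the two register words are stored into the parm's stack
   slot here, and the whole parm is thereafter treated as having
   arrived in memory.  */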
4312 | ||
4313 | /* If we didn't decide this parm came in a register, | |
4314 | by default it came on the stack. */ | |
4315 | if (entry_parm == 0) | |
4316 | entry_parm = stack_parm; | |
4317 | ||
4318 | /* For a stack parm, record in DECL_OFFSET the arglist offset | |
4319 | of the parm at the time it is passed (before conversion). */ | |
4320 | if (entry_parm == stack_parm) | |
4321 | DECL_OFFSET (parm) = stack_offset.constant * BITS_PER_UNIT; | |
4322 | ||
4323 | /* If there is actually space on the stack for this parm, | |
4324 | count it in stack_args_size; otherwise set stack_parm to 0 | |
4325 | to indicate there is no preallocated stack slot for the parm. */ | |
4326 | ||
4327 | if (entry_parm == stack_parm | |
4328 | #ifdef REG_PARM_STACK_SPACE | |
4329 | /* On some machines, even if a parm value arrives in a register | |
4330 | there is still an (uninitialized) stack slot allocated for it. */ | |
4331 | || 1 | |
4332 | #endif | |
4333 | ) | |
4334 | { | |
4335 | tree sizetree = size_in_bytes (DECL_ARG_TYPE (parm)); | |
4336 | #ifdef PUSH_ROUNDING | |
4337 | /* If this arg will be pushed with a push instruction, | |
4338 | note how that will add to its size. */ | |
4339 | if (DECL_MODE (parm) != BLKmode) | |
4340 | { | |
4341 | int old_bytes = int_size_in_bytes (DECL_ARG_TYPE (parm)); | |
4342 | sizetree = build_int_2 (PUSH_ROUNDING (old_bytes), 0); | |
4343 | } | |
4344 | #endif | |
4345 | if (where_pad != none) | |
4346 | { | |
4347 | /* Round the size up to multiple of PARM_BOUNDARY bits. */ | |
4348 | tree s1 = convert_units (sizetree, BITS_PER_UNIT, PARM_BOUNDARY); | |
4349 | sizetree = convert_units (s1, PARM_BOUNDARY, BITS_PER_UNIT); | |
4350 | } | |
4351 | /* Add it in. */ | |
4352 | ADD_PARM_SIZE (stack_args_size, sizetree); | |
4353 | } | |
4354 | else | |
4355 | /* No stack slot was pushed for this parm. */ | |
4356 | stack_parm = 0; | |
4357 | ||
4358 | /* Now adjust STACK_PARM to the mode and precise location | |
4359 | where this parameter should live during execution, | |
4360 | if we discover that it must live in the stack during execution. | |
4361 | To make debuggers happier on big-endian machines, we store | |
4362 | the value in the last bytes of the space available. */ | |
4363 | ||
4364 | if (nominal_mode != BLKmode && nominal_mode != passed_mode | |
4365 | && stack_parm != 0) | |
4366 | { | |
4367 | #ifdef BYTES_BIG_ENDIAN | |
4368 | if (GET_MODE_SIZE (nominal_mode) < UNITS_PER_WORD) | |
4369 | { | |
4370 | stack_offset.constant | |
4371 | += GET_MODE_SIZE (passed_mode) | |
4372 | - GET_MODE_SIZE (nominal_mode); | |
4373 | stack_offset_rtx = ARGS_SIZE_RTX (stack_offset); | |
4374 | } | |
4375 | #endif | |
4376 | ||
4377 | stack_parm | |
4378 | = gen_rtx (MEM, nominal_mode, | |
4379 | gen_rtx (PLUS, Pmode, | |
4380 | internal_arg_pointer, | |
4381 | stack_offset_rtx)); | |
4382 | ||
4383 | /* If this is a memory ref that contains aggregate components, | |
4384 | mark it as such for cse and loop optimize. */ | |
4385 | MEM_IN_STRUCT_P (stack_parm) = aggregate; | |
4386 | } | |
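| ||
| /* Example of that adjustment: if a char parm (nominal_mode QImode, | |
| 1 byte) was passed as an int (passed_mode SImode, assumed here to | |
| be 4 bytes), a big-endian machine keeps the significant byte in | |
| the last byte of the slot, so the offset is advanced by | |
| 4 - 1 == 3 before the MEM above is built. */ | |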
4387 | ||
4388 | /* ENTRY_PARM is an RTX for the parameter as it arrives, | |
4389 | in the mode in which it arrives. | |
4390 | STACK_PARM is an RTX for a stack slot where the parameter can live | |
4391 | during the function (in case we want to put it there). | |
4392 | STACK_PARM is 0 if no stack slot was pushed for it. | |
4393 | ||
4394 | Now output code if necessary to convert ENTRY_PARM to | |
4395 | the type in which this function declares it, | |
4396 | and store that result in an appropriate place, | |
4397 | which may be a pseudo reg, may be STACK_PARM, | |
4398 | or may be a local stack slot if STACK_PARM is 0. | |
4399 | ||
4400 | Set DECL_RTL to that place. */ | |
4401 | ||
4402 | if (nominal_mode == BLKmode) | |
4403 | { | |
4404 | /* If a BLKmode arrives in registers, copy it to a stack slot. */ | |
4405 | if (GET_CODE (entry_parm) == REG) | |
4406 | { | |
4407 | #if 0 /* This was probably wrong, but save it just in case. */ | |
4408 | rtx unpadded_stack_parm; | |
4409 | ||
4410 | /* Determine parm's home in the stack. */ | |
4411 | ||
4412 | if (stack_parm == 0) | |
4413 | unpadded_stack_parm | |
4414 | = assign_stack_local (GET_MODE (entry_parm), | |
4415 | int_size_in_bytes (TREE_TYPE (parm))); | |
4416 | else | |
4417 | unpadded_stack_parm | |
4418 | = gen_rtx (MEM, passed_mode, | |
4419 | memory_address (passed_mode, | |
4420 | gen_rtx (PLUS, Pmode, | |
4421 | internal_arg_pointer, | |
4422 | ARGS_SIZE_RTX (unpadded_stack_offset)))); | |
4423 | ||
4424 | /* Here we use unpadded_stack_parm because we assume | |
4425 | that downward padding is used on big-endian machines | |
4426 | where we would want to make the real data in the reg | |
4427 | (which is in the low bits) end up at the padded address. */ | |
4428 | #endif | |
4429 | if (stack_parm == 0) | |
4430 | stack_parm | |
4431 | = assign_stack_local (GET_MODE (entry_parm), | |
4432 | int_size_in_bytes (TREE_TYPE (parm))); | |
4433 | ||
4434 | move_block_from_reg (REGNO (entry_parm), | |
4435 | validize_mem (stack_parm), | |
4436 | ((int_size_in_bytes (TREE_TYPE (parm)) | |
4437 | + UNITS_PER_WORD - 1) | |
4438 | / UNITS_PER_WORD)); | |
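| /* That last argument rounds the parm's size up to whole words: | |
| e.g. a 10-byte struct with UNITS_PER_WORD == 4 (an assumption; | |
| the value is target-defined) arrives in | |
| (10 + 4 - 1) / 4 == 3 registers. */ | |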
4439 | } | |
4440 | DECL_RTL (parm) = stack_parm; | |
4441 | } | |
4442 | else if (! ((obey_regdecls && ! TREE_REGDECL (parm) | |
4443 | && ! TREE_INLINE (fndecl)) | |
4444 | /* layout_decl may set this. */ | |
4445 | || TREE_ADDRESSABLE (parm) | |
4446 | || TREE_VOLATILE (parm) | |
4447 | /* If -ffloat-store specified, don't put explicit | |
4448 | float variables into registers. */ | |
4449 | || (flag_float_store | |
4450 | && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))) | |
4451 | { | |
4452 | /* Store the parm in a pseudoregister during the function. */ | |
4453 | register rtx parmreg = gen_reg_rtx (nominal_mode); | |
4454 | ||
4455 | REG_USERVAR_P (parmreg) = 1; | |
4456 | DECL_RTL (parm) = parmreg; | |
4457 | ||
4458 | /* Copy the value into the register. */ | |
4459 | if (GET_MODE (parmreg) != GET_MODE (entry_parm)) | |
4460 | convert_move (parmreg, validize_mem (entry_parm), 0); | |
4461 | else | |
4462 | emit_move_insn (parmreg, validize_mem (entry_parm)); | |
4463 | ||
4464 | /* In any case, record the parm's desired stack location | |
4465 | in case we later discover it must live in the stack. */ | |
4466 | if (REGNO (parmreg) >= nparmregs) | |
4467 | { | |
4468 | rtx *new; | |
| int old_nparmregs = nparmregs; | |
4469 | nparmregs = REGNO (parmreg) + 5; | |
4470 | new = (rtx *) oballoc (nparmregs * sizeof (rtx)); | |
| /* Copy only the old vector's entries; clear the new ones. */ | |
4471 | bcopy (parm_reg_stack_loc, new, old_nparmregs * sizeof (rtx)); | |
| bzero ((char *) (new + old_nparmregs), | |
| (nparmregs - old_nparmregs) * sizeof (rtx)); | |
4472 | parm_reg_stack_loc = new; | |
4473 | } | |
4474 | parm_reg_stack_loc[REGNO (parmreg)] = stack_parm; | |
4475 | ||
4476 | /* Mark the register as eliminable if we did no conversion | |
4477 | and it was copied from memory at a fixed offset, | |
4478 | and the arg pointer was not copied to a pseudo-reg. | |
4479 | If the arg pointer is a pseudo reg, such memory-equivalences | |
4480 | as we make here would screw up life analysis for it. */ | |
4481 | if (nominal_mode == passed_mode | |
4482 | && GET_CODE (entry_parm) == MEM | |
4483 | && stack_offset.var == 0 | |
4484 | && ! arg_pointer_copied) | |
4485 | REG_NOTES (get_last_insn ()) | |
4486 | = gen_rtx (EXPR_LIST, REG_EQUIV, | |
4487 | entry_parm, REG_NOTES (get_last_insn ())); | |
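| ||
| /* The REG_EQUIV note records that the pseudo and the incoming | |
| stack slot hold the same value throughout the function, so if | |
| the pseudo fails to get a hard register it can simply be | |
| replaced by that MEM rather than given a fresh stack slot. */ | |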
4488 | ||
4489 | /* For pointer data type, suggest pointer register. */ | |
4490 | if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE) | |
4491 | mark_reg_pointer (parmreg); | |
4492 | } | |
4493 | else | |
4494 | { | |
4495 | /* Value must be stored in the stack slot STACK_PARM | |
4496 | during function execution. */ | |
4497 | ||
4498 | if (passed_mode != nominal_mode) | |
4499 | /* Conversion is required. */ | |
4500 | entry_parm = convert_to_mode (nominal_mode, | |
4501 | validize_mem (entry_parm), 0); | |
4502 | ||
4503 | if (entry_parm != stack_parm) | |
4504 | { | |
4505 | if (stack_parm == 0) | |
4506 | stack_parm = assign_stack_local (GET_MODE (entry_parm), | |
4507 | GET_MODE_SIZE (GET_MODE (entry_parm))); | |
4508 | emit_move_insn (validize_mem (stack_parm), | |
4509 | validize_mem (entry_parm)); | |
4510 | } | |
4511 | ||
4512 | DECL_RTL (parm) = stack_parm; | |
4513 | frame_pointer_needed = 1; | |
4514 | } | |
4515 | ||
4516 | if (TREE_VOLATILE (parm)) | |
4517 | MEM_VOLATILE_P (DECL_RTL (parm)) = 1; | |
4518 | if (TREE_READONLY (parm)) | |
4519 | RTX_UNCHANGING_P (DECL_RTL (parm)) = 1; | |
4520 | ||
4521 | /* Update info on where next arg arrives in registers. */ | |
4522 | ||
4523 | FUNCTION_ARG_ADVANCE (args_so_far, passed_mode, DECL_ARG_TYPE (parm), 1); | |
4524 | } | |
4525 | ||
4526 | max_parm_reg = max_reg_num (); | |
4527 | last_parm_insn = get_last_insn (); | |
4528 | ||
4529 | current_function_args_size = stack_args_size.constant; | |
4530 | ||
4531 | stack_args_size.constant += first_parm_offset; | |
4532 | current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size); | |
4533 | } | |
4534 | \f | |
4535 | /* Allocation of space for returned structure values. | |
4536 | During the rtl generation pass, `get_structure_value_addr' | |
4537 | is called from time to time to request the address of a block in our | |
4538 | stack frame in which called functions will store the structures | |
4539 | they are returning. The same space is used for all of these blocks. | |
4540 | ||
4541 | We allocate these blocks like stack locals. We keep reusing | |
4542 | the same block until a bigger one is needed. */ | |
4543 | ||
4544 | /* Length in bytes of largest structure value returned by | |
4545 | any function called so far in this function. */ | |
4546 | static int max_structure_value_size; | |
4547 | ||
4548 | /* An rtx for the addr we are currently using for structure values. | |
4549 | This is typically (PLUS (REG:SI stackptr) (CONST_INT...)). */ | |
4550 | static rtx structure_value; | |
4551 | ||
4552 | rtx | |
4553 | get_structure_value_addr (sizex) | |
4554 | rtx sizex; | |
4555 | { | |
4556 | register int size; | |
4557 | if (GET_CODE (sizex) != CONST_INT) | |
4558 | abort (); | |
4559 | size = INTVAL (sizex); | |
4560 | ||
4561 | /* Round up to a multiple of the main allocation unit. */ | |
4562 | size = (((size + (BIGGEST_ALIGNMENT / BITS_PER_UNIT) - 1) | |
4563 | / (BIGGEST_ALIGNMENT / BITS_PER_UNIT)) | |
4564 | * (BIGGEST_ALIGNMENT / BITS_PER_UNIT)); | |
4565 | ||
4566 | /* If this size is bigger than space we know to use, | |
4567 | get a bigger piece of space. */ | |
4568 | if (size > max_structure_value_size) | |
4569 | { | |
4570 | max_structure_value_size = size; | |
4571 | structure_value = assign_stack_local (BLKmode, size); | |
4572 | if (GET_CODE (structure_value) == MEM) | |
4573 | structure_value = XEXP (structure_value, 0); | |
4574 | } | |
4575 | ||
4576 | return structure_value; | |
4577 | } | |
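| ||
| /* For example, with BIGGEST_ALIGNMENT == 64 (8-byte units, an | |
| assumption; the value is target-defined), a request for 20 bytes | |
| is rounded up to (20 + 7) / 8 * 8 == 24, so a later request for | |
| 24 bytes or less reuses the same block. A caller might use it | |
| like this (a hypothetical sketch): | |
| ||
| rtx addr | |
| = get_structure_value_addr (gen_rtx (CONST_INT, VOIDmode, 20)); | |
| ||
| and then arrange for the called function to store its value | |
| at ADDR. */ | |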
4578 | ||
4579 | /* Push and pop the current structure value block. */ | |
4580 | ||
4581 | void | |
4582 | push_structure_value (rtx_ptr, size_ptr) | |
4583 | rtx *rtx_ptr; | |
4584 | int *size_ptr; | |
4585 | { | |
4586 | *rtx_ptr = structure_value; | |
4587 | *size_ptr = max_structure_value_size; | |
4588 | max_structure_value_size = 0; | |
4589 | structure_value = 0; | |
4590 | } | |
4591 | ||
4592 | void | |
4593 | pop_structure_value (rtx_value, size) | |
4594 | rtx rtx_value; | |
4595 | int size; | |
4596 | { | |
4597 | structure_value = rtx_value; | |
4598 | max_structure_value_size = size; | |
4599 | } | |
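| ||
| /* These two are meant to be used as a matched pair, saving and | |
| restoring the current block around some nested stretch of RTL | |
| generation that must not reuse it (a sketch, with hypothetical | |
| surroundings): | |
| ||
| rtx saved_value; | |
| int saved_size; | |
| push_structure_value (&saved_value, &saved_size); | |
| ... generate code that needs its own block ... | |
| pop_structure_value (saved_value, saved_size); */ | |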
4600 | ||
4601 | \f | |
4602 | /* Walk the tree of LET_STMTs describing the binding levels within a function | |
4603 | and warn about uninitialized variables. | |
4604 | This is done after calling flow_analysis and before global_alloc | |
4605 | clobbers the pseudo-regs to hard regs. */ | |
4606 | ||
4607 | void | |
4608 | uninitialized_vars_warning (block) | |
4609 | tree block; | |
4610 | { | |
4611 | register tree decl, sub; | |
4612 | for (decl = STMT_VARS (block); decl; decl = TREE_CHAIN (decl)) | |
4613 | { | |
4614 | if (TREE_CODE (decl) == VAR_DECL | |
4615 | /* These warnings are unreliable for aggregates | |
4616 | because assigning the fields one by one can fail to convince | |
4617 | flow.c that the entire aggregate was initialized. | |
4618 | Unions are troublesome because members may be shorter. */ | |
4619 | && TREE_CODE (TREE_TYPE (decl)) != RECORD_TYPE | |
4620 | && TREE_CODE (TREE_TYPE (decl)) != UNION_TYPE | |
4621 | && TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE | |
4622 | && DECL_RTL (decl) != 0 | |
4623 | && GET_CODE (DECL_RTL (decl)) == REG | |
4624 | && regno_uninitialized (REGNO (DECL_RTL (decl)))) | |
4625 | warning_with_decl (decl, | |
4626 | "`%s' may be used uninitialized in this function"); | |
4627 | if (TREE_CODE (decl) == VAR_DECL | |
4628 | && DECL_RTL (decl) != 0 | |
4629 | && GET_CODE (DECL_RTL (decl)) == REG | |
4630 | && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl)))) | |
4631 | warning_with_decl (decl, | |
4632 | "variable `%s' may be clobbered by `longjmp'"); | |
4633 | } | |
4634 | for (sub = STMT_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
4635 | uninitialized_vars_warning (sub); | |
4636 | } | |
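| ||
| #if 0 /* Example (hypothetical) of a function that draws the first | |
| warning above: `i' can reach the return without ever having | |
| been stored into. */ | |
| int | |
| uninitialized_example () | |
| { | |
| int i; | |
| return i; | |
| } | |
| #endif | |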
4637 | ||
4638 | /* If this function calls setjmp, put all vars into the stack | |
4639 | unless they were declared `register'. */ | |
4640 | ||
4641 | void | |
4642 | setjmp_protect (block) | |
4643 | tree block; | |
4644 | { | |
4645 | register tree decl, sub; | |
4646 | for (decl = STMT_VARS (block); decl; decl = TREE_CHAIN (decl)) | |
4647 | if ((TREE_CODE (decl) == VAR_DECL | |
4648 | || TREE_CODE (decl) == PARM_DECL) | |
4649 | && DECL_RTL (decl) != 0 | |
4650 | && GET_CODE (DECL_RTL (decl)) == REG | |
4651 | && ! TREE_REGDECL (decl)) | |
4652 | put_var_into_stack (decl); | |
4653 | for (sub = STMT_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub)) | |
4654 | setjmp_protect (sub); | |
4655 | } | |
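| ||
| #if 0 /* Why this matters: when `longjmp' unwinds back to the | |
| `setjmp' below, registers are restored to their values as of the | |
| setjmp, so a local kept in a register could silently lose a store | |
| made in between. A hypothetical example; assumes <setjmp.h> and | |
| a callee somewhere that longjmps to ENV. */ | |
| jmp_buf env; | |
| int | |
| setjmp_example () | |
| { | |
| int x = 0; | |
| if (setjmp (env)) | |
| return x; | |
| x = 1; | |
| might_longjmp (); | |
| return 0; | |
| } | |
| #endif | |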
4656 | \f | |
4657 | /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node) | |
4658 | and initialize static variables for generating RTL for the statements | |
4659 | of the function. */ | |
4660 | ||
4661 | void | |
4662 | init_function_start (subr, filename, line) | |
4663 | tree subr; | |
4664 | char *filename; | |
4665 | int line; | |
4666 | { | |
4667 | this_function = subr; | |
4668 | cse_not_expected = ! optimize; | |
4669 | ||
4670 | /* We have not yet found a reason why a frame pointer cannot | |
4671 | be omitted for this function in particular, but maybe we know | |
4672 | a priori that it is required. | |
4673 | `flag_omit_frame_pointer' has its main effect here. */ | |
4674 | frame_pointer_needed = FRAME_POINTER_REQUIRED || ! flag_omit_frame_pointer; | |
4675 | ||
4676 | /* Caller save not needed yet. */ | |
4677 | caller_save_needed = 0; | |
4678 | ||
4679 | /* No gotos have been expanded yet. */ | |
4680 | goto_fixup_chain = 0; | |
4681 | ||
4682 | /* No stack slots have been made yet. */ | |
4683 | stack_slot_list = 0; | |
4684 | ||
4685 | /* No invalid stack slots have been made yet. */ | |
4686 | invalid_stack_slot = 0; | |
4687 | ||
4688 | /* No parm regs have been allocated. | |
4689 | (This is important for output_inline_function.) */ | |
4690 | max_parm_reg = FIRST_PSEUDO_REGISTER; | |
4691 | ||
4692 | /* Initialize the RTL mechanism. */ | |
4693 | init_emit (write_symbols); | |
4694 | ||
4695 | /* Initialize the queue of pending postincrement and postdecrements, | |
4696 | and some other info in expr.c. */ | |
4697 | init_expr (); | |
4698 | ||
4699 | init_const_rtx_hash_table (); | |
4700 | ||
4701 | /* Decide whether function should try to pop its args on return. */ | |
4702 | ||
4703 | current_function_pops_args = RETURN_POPS_ARGS (TREE_TYPE (subr)); | |
4704 | ||
4705 | current_function_name = DECL_PRINT_NAME (subr); | |
4706 | ||
4707 | /* Nonzero if this is a nested function that uses a static chain. */ | |
4708 | ||
4709 | current_function_needs_context | |
4710 | = (DECL_CONTEXT (current_function_decl) != 0 | |
4711 | && TREE_CODE (DECL_CONTEXT (current_function_decl)) == LET_STMT); | |
4712 | ||
4713 | /* Set if a call to setjmp is seen. */ | |
4714 | ||
4715 | current_function_calls_setjmp = 0; | |
4716 | current_function_calls_alloca = 0; | |
4717 | ||
4718 | current_function_returns_pcc_struct = 0; | |
4719 | current_function_returns_struct = 0; | |
4720 | ||
4721 | /* No space assigned yet for structure values. */ | |
4722 | max_structure_value_size = -1; | |
4723 | structure_value = 0; | |
4724 | ||
4725 | /* We are not currently within any block, conditional, loop or case. */ | |
4726 | block_stack = 0; | |
4727 | loop_stack = 0; | |
4728 | case_stack = 0; | |
4729 | cond_stack = 0; | |
4730 | nesting_stack = 0; | |
4731 | nesting_depth = 0; | |
4732 | ||
4733 | block_start_count = 0; | |
4734 | ||
4735 | /* We have not yet needed to make a label to jump to for tail-recursion. */ | |
4736 | tail_recursion_label = 0; | |
4737 | ||
4738 | /* No stack slots allocated yet. */ | |
4739 | frame_offset = STARTING_FRAME_OFFSET; | |
4740 | ||
4741 | /* No SAVE_EXPRs in this function yet. */ | |
4742 | save_expr_regs = 0; | |
4743 | ||
4744 | /* No RTL_EXPRs in this function yet. */ | |
4745 | rtl_expr_chain = 0; | |
4746 | ||
4747 | /* Within function body, compute a type's size as soon as it is laid out. */ | |
4748 | immediate_size_expand++; | |
4749 | ||
4750 | init_pending_stack_adjust (); | |
4751 | inhibit_defer_pop = 0; | |
4752 | current_function_pretend_args_size = 0; | |
4753 | ||
4754 | /* Prevent ever trying to delete the first instruction of a function. | |
4755 | Also tell final how to output a linenum before the function prologue. */ | |
4756 | emit_line_note (filename, line); | |
4757 | /* Make sure first insn is a note even if we don't want linenums. | |
4758 | This makes sure the first insn will never be deleted. | |
4759 | Also, final expects a note to appear there. */ | |
4760 | emit_note (0, NOTE_INSN_DELETED); | |
4761 | /* Indicate the beginning of the function body, | |
4762 | as opposed to parm setup. */ | |
4763 | emit_note (0, NOTE_INSN_FUNCTION_BEG); | |
4764 | ||
4765 | /* Set flags used by final.c. */ | |
4766 | if (aggregate_value_p (DECL_RESULT (subr))) | |
4767 | { | |
4768 | #ifdef PCC_STATIC_STRUCT_RETURN | |
4769 | if (flag_pcc_struct_return) | |
4770 | current_function_returns_pcc_struct = 1; | |
4771 | else | |
4772 | #endif | |
4773 | current_function_returns_struct = 1; | |
4774 | } | |
4775 | } | |
4776 | ||
4777 | /* Start the RTL for a new function, and set variables used for | |
4778 | emitting RTL. | |
4779 | SUBR is the FUNCTION_DECL node. | |
4780 | PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with | |
4781 | the function's parameters, which must be run at any return statement. */ | |
4782 | ||
4783 | void | |
4784 | expand_function_start (subr, parms_have_cleanups) | |
4785 | tree subr; | |
4786 | int parms_have_cleanups; | |
4787 | { | |
4788 | register int i; | |
4789 | tree tem; | |
4790 | ||
4791 | /* Make sure volatile mem refs aren't considered | |
4792 | valid operands of arithmetic insns. */ | |
4793 | init_recog (); | |
4794 | ||
4795 | /* If the parameters of this function need cleaning up, get a label | |
4796 | for the beginning of the code which executes those cleanups. This must | |
4797 | be done before doing anything with return_label. */ | |
4798 | if (parms_have_cleanups) | |
4799 | cleanup_label = gen_label_rtx (); | |
4800 | else | |
4801 | cleanup_label = 0; | |
4802 | ||
4803 | /* Make the label for return statements to jump to, if this machine | |
4804 | does not have a one-instruction return and uses an epilogue, | |
4805 | or if it returns a structure, or if it has parm cleanups. */ | |
4806 | #ifdef HAVE_return | |
4807 | if (cleanup_label == 0 && HAVE_return | |
4808 | && ! current_function_returns_pcc_struct | |
4809 | && ! (current_function_returns_struct && ! optimize)) | |
4810 | return_label = 0; | |
4811 | else | |
4812 | return_label = gen_label_rtx (); | |
4813 | #else | |
4814 | return_label = gen_label_rtx (); | |
4815 | #endif | |
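| ||
| /* In other words, RETURN_LABEL is omitted only when every `return' | |
| statement can expand directly into the machine's return insn; | |
| parm cleanups, pcc-style structure return, or an un-optimized | |
| structure return each force a single common exit point. */ | |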
4816 | ||
4817 | /* Initialize rtx used to return the value. */ | |
4818 | /* Do this before assign_parms so that we copy the struct value address | |
4819 | before any library calls that assign parms might generate. */ | |
4820 | ||
4821 | /* Decide whether to return the value in memory or in a register. */ | |
4822 | if (aggregate_value_p (DECL_RESULT (subr))) | |
4823 | { | |
4824 | /* Returning something that won't go in a register. */ | |
4825 | register rtx value_address; | |
4826 | ||
4827 | #ifdef PCC_STATIC_STRUCT_RETURN | |
4828 | if (flag_pcc_struct_return) | |
4829 | { | |
4830 | int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr))); | |
4831 | value_address = assemble_static_space (size); | |
4832 | current_function_returns_pcc_struct = 1; | |
4833 | } | |
4834 | else | |
4835 | #endif | |
4836 | { | |
4837 | /* Expect to be passed the address of a place to store the value. */ | |
4838 | value_address = gen_reg_rtx (Pmode); | |
4839 | emit_move_insn (value_address, struct_value_incoming_rtx); | |
4840 | current_function_returns_struct = 1; | |
4841 | } | |
4842 | DECL_RTL (DECL_RESULT (subr)) | |
4843 | = gen_rtx (MEM, DECL_MODE (DECL_RESULT (subr)), | |
4844 | value_address); | |
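| /* So for `struct big f () {...}' (a hypothetical example) the | |
| body sees its result as a MEM at VALUE_ADDRESS: either the | |
| address the caller passed in STRUCT_VALUE_INCOMING_RTX, or, | |
| pcc-style, the static space allocated above. */ | |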
4845 | } | |
4846 | else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode) | |
4847 | /* If return mode is void, this decl rtl should not be used. */ | |
4848 | DECL_RTL (DECL_RESULT (subr)) = 0; | |
4849 | else if (parms_have_cleanups) | |
4850 | /* If function will end with cleanup code for parms, | |
4851 | compute the return value into a pseudo reg, | |
4852 | which we will copy into the true return register | |
4853 | after the cleanups are done. */ | |
4854 | DECL_RTL (DECL_RESULT (subr)) | |
4855 | = gen_reg_rtx (DECL_MODE (DECL_RESULT (subr))); | |
4856 | else | |
4857 | /* Scalar, returned in a register. */ | |
4858 | { | |
4859 | #ifdef FUNCTION_OUTGOING_VALUE | |
4860 | DECL_RTL (DECL_RESULT (subr)) | |
4861 | = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr); | |
4862 | #else | |
4863 | DECL_RTL (DECL_RESULT (subr)) | |
4864 | = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (subr)), subr); | |
4865 | #endif | |
4866 | ||
4867 | current_function_returns_pointer | |
4868 | = (TREE_CODE (DECL_RESULT_TYPE (subr)) == POINTER_TYPE); | |
4869 | ||
4870 | /* Mark this reg as the function's return value. */ | |
4871 | if (GET_CODE (DECL_RTL (DECL_RESULT (subr))) == REG) | |
4872 | REG_FUNCTION_VALUE_P (DECL_RTL (DECL_RESULT (subr))) = 1; | |
4873 | } | |
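| ||
| /* Where FUNCTION_OUTGOING_VALUE is defined, the register holding | |
| the value inside the returning function differs from the one the | |
| caller reads (FUNCTION_VALUE), as with register windows; hence | |
| the outgoing variant is used here, in the function itself. */ | |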
4874 | ||
4875 | /* Initialize rtx for parameters and local variables. | |
4876 | In some cases this requires emitting insns. */ | |
4877 | ||
4878 | assign_parms (subr); | |
4879 | ||
4880 | /* If doing stupid allocation, mark parms as born here. */ | |
4881 | ||
4882 | if (GET_CODE (get_last_insn ()) != NOTE) | |
4883 | emit_note (0, NOTE_INSN_DELETED); | |
4884 | parm_birth_insn = get_last_insn (); | |
4885 | ||
4886 | if (obey_regdecls) | |
4887 | { | |
4888 | for (i = FIRST_PSEUDO_REGISTER; i < max_parm_reg; i++) | |
4889 | use_variable (regno_reg_rtx[i]); | |
4890 | } | |
4891 | ||
4892 | /* After the parm initializations is where the tail-recursion label | |
4893 | should go, if we end up needing one. */ | |
4894 | tail_recursion_reentry = get_last_insn (); | |
4895 | ||
4896 | /* Evaluate now the sizes of any types declared among the arguments. */ | |
4897 | for (tem = get_pending_sizes (); tem; tem = TREE_CHAIN (tem)) | |
4898 | expand_expr (TREE_VALUE (tem), 0, VOIDmode, 0); | |
4899 | ||
4900 | /* Make sure there is a line number after the function entry setup code. | |
4901 | There normally is one anyway, from the following statement, | |
4902 | but there could fail to be one if there is no newline here. */ | |
4903 | force_next_line_note (); | |
4904 | } | |
4905 | ||
4906 | /* Generate RTL for the end of the current function. | |
4907 | FILENAME and LINE are the current position in the source file. */ | |
4908 | ||
4909 | /* ??? Nobody seems to emit the cleanup_label and the cleanups themselves. */ | |
4910 | ||
4911 | void | |
4912 | expand_function_end (filename, line) | |
4913 | char *filename; | |
4914 | int line; | |
4915 | { | |
4916 | register int i; | |
4917 | tree decl; | |
4918 | extern rtx sequence_stack; | |
4919 | ||
4920 | #if 0 /* I think unused parms are legitimate enough. */ | |
4921 | /* Warn about unused parms. */ | |
4922 | if (warn_unused) | |
4923 | for (decl = DECL_ARGUMENTS (current_function_decl); | |
4924 | decl; decl = TREE_CHAIN (decl)) | |
4925 | if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL) | |
4926 | warning_with_decl (decl, "unused parameter `%s'"); | |
4927 | #endif | |
4928 | ||
4929 | /* End any sequences that failed to be closed due to syntax errors. */ | |
4930 | while (sequence_stack) | |
4931 | end_sequence (0); | |
4932 | ||
4933 | /* Outside function body, can't compute type's actual size | |
4934 | until next function's body starts. */ | |
4935 | immediate_size_expand--; | |
4936 | ||
4937 | /* If doing stupid register allocation, | |
4938 | mark register parms as dying here. */ | |
4939 | ||
4940 | if (obey_regdecls) | |
4941 | { | |
4942 | rtx tem; | |
4943 | for (i = FIRST_PSEUDO_REGISTER; i < max_parm_reg; i++) | |
4944 | use_variable (regno_reg_rtx[i]); | |
4945 | ||
4946 | /* Likewise for the regs of all the SAVE_EXPRs in the function. */ | |
4947 | ||
4948 | for (tem = save_expr_regs; tem; tem = XEXP (tem, 1)) | |
4949 | { | |
4950 | /* ??? Tiemann thinks this does not work. */ | |
4951 | use_variable (XEXP (tem, 0)); | |
4952 | use_variable_after (XEXP (tem, 0), parm_birth_insn); | |
4953 | } | |
4954 | } | |
4955 | ||
4956 | clear_pending_stack_adjust (); | |
4957 | do_pending_stack_adjust (); | |
4958 | ||
4959 | /* Mark the end of the function body. | |
4960 | If control reaches this insn, the function can drop through | |
4961 | without returning a value. */ | |
4962 | emit_note (0, NOTE_INSN_FUNCTION_END); | |
4963 | ||
4964 | /* Output a linenumber for the end of the function. | |
4965 | SDB depends on this. */ | |
4966 | emit_line_note_force (filename, line); | |
4967 | ||
4968 | /* Output the label for the actual return from the function, | |
4969 | if one is expected. This happens either because a function epilogue | |
4970 | is used instead of a return instruction, or because a return was done | |
4971 | with a goto in order to run local cleanups, or because of pcc-style | |
4972 | structure returning. */ | |
4973 | ||
4974 | if (return_label) | |
4975 | emit_label (return_label); | |
4976 | ||
4977 | /* If we had calls to alloca, and this machine needs | |
4978 | an accurate stack pointer to exit the function, | |
4979 | insert some code to save and restore the stack pointer. */ | |
4980 | #ifdef EXIT_IGNORE_STACK | |
4981 | if (! EXIT_IGNORE_STACK) | |
4982 | #endif | |
4983 | if (current_function_calls_alloca) | |
4984 | { | |
4985 | rtx tem = gen_reg_rtx (Pmode); | |
4986 | emit_insn_after (gen_rtx (SET, VOIDmode, tem, stack_pointer_rtx), | |
4987 | parm_birth_insn); | |
4988 | emit_insn (gen_rtx (SET, VOIDmode, stack_pointer_rtx, tem)); | |
4989 | } | |
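| ||
| /* Net effect: TEM captures the stack pointer right after the | |
| parms are set up, before any `alloca' has run, and the copy | |
| back just before exit releases all alloca'd storage at once. */ | |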
4990 | ||
4991 | /* If scalar return value was computed in a pseudo-reg, | |
4992 | copy that to the hard return register. */ | |
4993 | if (DECL_RTL (DECL_RESULT (current_function_decl)) != 0 | |
4994 | && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG | |
4995 | && (REGNO (DECL_RTL (DECL_RESULT (current_function_decl))) | |
4996 | >= FIRST_PSEUDO_REGISTER)) | |
4997 | { | |
4998 | rtx real_decl_result; | |
4999 | ||
5000 | #ifdef FUNCTION_OUTGOING_VALUE | |
5001 | real_decl_result | |
5002 | = FUNCTION_OUTGOING_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)), | |
5003 | current_function_decl); | |
5004 | #else | |
5005 | real_decl_result | |
5006 | = FUNCTION_VALUE (TREE_TYPE (DECL_RESULT (current_function_decl)), | |
5007 | current_function_decl); | |
5008 | #endif | |
5009 | REG_FUNCTION_VALUE_P (real_decl_result) = 1; | |
5010 | emit_move_insn (real_decl_result, | |
5011 | DECL_RTL (DECL_RESULT (current_function_decl))); | |
5012 | emit_insn (gen_rtx (USE, VOIDmode, real_decl_result)); | |
5013 | } | |
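| ||
| /* E.g. if `int f ()' computed its result into pseudo 57 because | |
| cleanups were pending (a hypothetical register number), this | |
| emits a move from pseudo 57 into the hard return register, | |
| plus a USE so the move is not deleted as dead. */ | |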
5014 | ||
5015 | /* If returning a structure, arrange to return the address of the value | |
5016 | in a place where debuggers expect to find it. */ | |
5017 | /* If returning a structure PCC style, | |
5018 | the caller also depends on this value. | |
5019 | And current_function_returns_pcc_struct is not necessarily set. */ | |
5020 | if (current_function_returns_struct | |
5021 | || current_function_returns_pcc_struct) | |
5022 | { | |
5023 | rtx value_address = XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0); | |
5024 | tree type = TREE_TYPE (DECL_RESULT (current_function_decl)); | |
5025 | #ifdef FUNCTION_OUTGOING_VALUE | |
5026 | rtx outgoing | |
5027 | = FUNCTION_OUTGOING_VALUE (build_pointer_type (type), | |
5028 | current_function_decl); | |
5029 | #else | |
5030 | rtx outgoing | |
5031 | = hard_function_value (build_pointer_type (type), | |
5032 | current_function_decl); | |
5033 | #endif | |
5034 | ||
5035 | REG_FUNCTION_VALUE_P (outgoing) = 1; | |
5036 | emit_move_insn (outgoing, value_address); | |
5037 | use_variable (outgoing); | |
5038 | } | |
5039 | ||
5040 | /* Output a return insn if we are using one. | |
5041 | Otherwise, let the rtl chain end here, to drop through | |
5042 | into the epilogue. */ | |
5043 | ||
5044 | #ifdef HAVE_return | |
5045 | if (HAVE_return) | |
5046 | emit_jump_insn (gen_return ()); | |
5047 | #endif | |
5048 | ||
5049 | /* Fix up any gotos that jumped out to the outermost | |
5050 | binding level of the function. | |
5051 | Must follow emitting RETURN_LABEL. */ | |
5052 | ||
5053 | /* If you have any cleanups to do at this point, | |
5054 | and they need to create temporary variables, | |
5055 | then you will lose. */ | |
5056 | fixup_gotos (0, 0, 0, get_insns (), 0); | |
5057 | } |