/* Procedure integration for GNU CC.
   Copyright (C) 1988 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@mcc.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 1, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include <stdio.h>

#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "insn-flags.h"
#include "expr.h"

#include "obstack.h"
#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
extern int xmalloc ();
extern void free ();

extern struct obstack permanent_obstack, maybepermanent_obstack;
extern struct obstack *rtl_obstack, *saveable_obstack, *current_obstack;

extern rtx stack_slot_list;

#define MIN(x,y) (((x) < (y)) ? (x) : (y))

extern tree pushdecl ();
extern tree poplevel ();

/* Default max number of insns a function can have and still be inline.
   This is overridden on RISC machines.  */
#ifndef INTEGRATE_THRESHOLD
#define INTEGRATE_THRESHOLD(DECL) \
  (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
#endif
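
/* A rough worked example of the default threshold (a sketch, not part
   of the original code): for a function with two parameters,
   list_length (DECL_ARGUMENTS (DECL)) is 2, so the limit is
   8 * (8 + 2) = 80 insns; each extra parameter buys 8 more insns.  */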

/* This is the target of the inline function being expanded,
   or NULL if there is none.  */
static rtx inline_target;

/* We must take special care not to disrupt life too severely
   when performing procedure integration.  One thing this involves
   is not creating illegitimate addresses which reload cannot fix.
   Since we don't know what the frame pointer is not capable of
   (in a machine independent way), we create a pseudo-frame pointer
   which will have to do for now.  */
static rtx inline_fp_rtx;

/* Convert old frame-pointer offsets to new.  Parameters which only
   produce values (no addresses, and are never assigned), map directly
   to the pseudo-reg of the incoming value.  Parameters that are
   assigned to but do not have their address taken are given a fresh
   pseudo-register.  Parameters that have their address taken are
   given a fresh stack-slot.  */
static rtx *parm_map;

/* ?? Should this be done here??  It is not right now.
   Keep track of whether a given pseudo-register is the sum
   of the frame pointer and a const_int (or zero).  */
static char *fp_addr_p;

/* For the local variables of the procedure being integrated that live
   on the frame, FRAME_POINTER_DELTA says how much to change their
   offsets by, so that they now live in the correct place on the
   frame of the function being compiled.  */
static int fp_delta;

/* When an insn is being copied by copy_rtx_and_substitute,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.
   Likewise in copy_for_inline.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_rtx_and_substitute,
   this is nonzero if we have copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.
   Likewise in copy_for_inline.  */
static rtvec copy_asm_operands_vector;

/* Likewise, this is the copied constraints vector.  */
static rtvec copy_asm_constraints_vector;

/* Return a copy of an rtx (as needed), substituting pseudo-registers,
   labels, and frame-pointer offsets as necessary.  */
static rtx copy_rtx_and_substitute ();
/* Variant, used for memory addresses that are not memory_address_p.  */
static rtx copy_address ();

/* Return the rtx corresponding to a given index in the stack arguments.  */
static rtx access_parm_map ();

static void copy_parm_decls ();
static void copy_decl_tree ();

static rtx try_fold_cc0 ();

/* We do some simple constant folding optimization.  This optimization
   really exists primarily to save time inlining a function.  It
   also helps users who ask for inline functions without -O.  */
static rtx fold_out_const_cc0 ();

/* Zero if the current function (whose FUNCTION_DECL is FNDECL)
   is safe and reasonable to integrate into other functions.
   Nonzero means value is a warning message with a single %s
   for the function's name.  */

char *
function_cannot_inline_p (fndecl)
     register tree fndecl;
{
  register rtx insn;
  tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
  int max_insns = INTEGRATE_THRESHOLD (fndecl);
  register int ninsns = 0;
  register tree parms;

  /* No inlines with varargs.  `grokdeclarator' gives a warning
     message about that if `inline' is specified.  This code
     is put in to catch the volunteers.  */
  if (last && TREE_VALUE (last) != void_type_node)
    return "varargs function cannot be inline";

  if (current_function_calls_alloca)
    return "function using alloca cannot be inline";

  /* If it's not even close, don't even look.  */
  if (!TREE_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
    return "function too large to be inline";

  /* We can't inline functions that return structures
     the old-fashioned PCC way, copying into a static block.  */
#ifdef PCC_STATIC_STRUCT_RETURN
  if (flag_pcc_struct_return
      && (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
          || RETURN_IN_MEMORY (TREE_TYPE (TREE_TYPE (fndecl)))))
    return "inline functions not supported for this return value type";
#endif

  /* Don't inline functions which have BLKmode arguments.
     Don't inline functions that take the address of
     a parameter and do not specify a function prototype.  */
  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
        return "function with large aggregate parameter cannot be inline";
      if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
        return "no prototype, and parameter address used; cannot be inline";
      /* If an aggregate is thought of as "in memory"
         then its components are referred to by narrower memory refs.
         If the actual parameter is a reg, these refs can't be translated,
         esp. since copy_rtx_and_substitute doesn't know whether it is
         reading or writing.  */
      if ((TREE_CODE (TREE_TYPE (parms)) == RECORD_TYPE
           || TREE_CODE (TREE_TYPE (parms)) == UNION_TYPE)
          && GET_CODE (DECL_RTL (parms)) == MEM)
        return "address of an aggregate parameter is used; cannot be inline";
    }

  if (!TREE_INLINE (fndecl) && get_max_uid () > max_insns)
    {
      for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
           insn = NEXT_INSN (insn))
        {
          if (GET_CODE (insn) == INSN
              || GET_CODE (insn) == JUMP_INSN
              || GET_CODE (insn) == CALL_INSN)
            ninsns++;
        }

      if (ninsns >= max_insns)
        return "function too large to be inline";
    }

  return 0;
}

/* Variables used within save_for_inline.  */

/* Mapping from old pseudo-registers to new pseudo-registers.
   The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *reg_map;

/* Mapping from old code-labels to new code-labels.
   The first element of this map is label_map[min_labelno].
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *label_map;

/* Mapping from old insn uid's to copied insns.
   It is allocated in `save_for_inline' and `expand_inline_function',
   and deallocated on exit from each of those routines.  */
static rtx *insn_map;

/* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
   Zero for a reg that isn't a parm's home.
   Only reg numbers less than max_parm_reg are mapped here.  */
static tree *parmdecl_map;

/* Keep track of first pseudo-register beyond those that are parms.  */
static int max_parm_reg;

/* Offset from arg ptr to the first parm of this inline function.  */
static int first_parm_offset;

/* On machines that perform a function return with a single
   instruction, such as the VAX, these return insns must be
   mapped into branch statements.  */
extern rtx return_label;

/* Copy an rtx for save_for_inline.  */
static rtx copy_for_inline ();

/* Make the insns and PARM_DECLs of the current function permanent
   and record other information in DECL_SAVED_INSNS to allow inlining
   of this function in subsequent calls.  */

void
save_for_inline (fndecl)
     tree fndecl;
{
  extern rtx *regno_reg_rtx;    /* in emit-rtl.c.  */
  extern int current_function_args_size;

  rtx first_insn, last_insn, insn;
  rtx head, copy;
  tree parms;
  int max_labelno, min_labelno, i, len;
  int max_reg;
  int max_uid;

  /* Make and emit a return-label if we have not already done so.  */

  if (return_label == 0)
    {
      return_label = gen_label_rtx ();
      emit_label (return_label);
    }

  /* Get some bounds on the labels and registers used.  */

  max_labelno = max_label_num ();
  min_labelno = get_first_label_num ();
  max_parm_reg = max_parm_reg_num ();
  max_reg = max_reg_num ();

  /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.

     Set TREE_VOLATILE to 0 if the parm is in a register, otherwise 1.
     Later we set TREE_READONLY to 0 if the parm is modified inside the fn.  */

  parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
  bzero (parmdecl_map, max_parm_reg * sizeof (tree));

  for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
    {
      rtx p = DECL_RTL (parms);

      if (GET_CODE (p) == REG)
        {
          parmdecl_map[REGNO (p)] = parms;
          TREE_VOLATILE (parms) = 0;
        }
      else
        TREE_VOLATILE (parms) = 1;
      TREE_READONLY (parms) = 1;
    }

  /* The list of DECL_SAVED_INSNS starts off with a header which
     contains the following information:

     the first insn of the function (not including the insns that copy
     parameters into registers).
     the first label used by that function,
     the last label used by that function,
     and the total number of registers used.  */

  head = gen_inline_header_rtx (NULL, NULL, min_labelno, max_labelno,
                                max_parm_reg, max_reg,
                                current_function_args_size, stack_slot_list);
  max_uid = INSN_UID (head);

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we
     can free the rest of this when the time comes.  */

  preserve_data ();

  /* Copy the chain of insns of this function.
     Install the copied chain as the insns of this function,
     for continued compilation;
     the original chain is recorded as the DECL_SAVED_INSNS
     for inlining future calls.  */

  /* If there are insns that copy parms from the stack into pseudo registers,
     those insns are not copied.  `expand_inline_function' must
     emit the correct code to handle such things.  */

  insn = get_insns ();
  if (GET_CODE (insn) != NOTE)
    abort ();
  first_insn = rtx_alloc (NOTE);
  NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
  NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
  INSN_UID (first_insn) = INSN_UID (insn);
  PREV_INSN (first_insn) = NULL;
  NEXT_INSN (first_insn) = NULL;
  last_insn = first_insn;

  /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
     Make these new rtx's now, and install them in regno_reg_rtx, so they
     will be the official pseudo-reg rtx's for the rest of compilation.  */

  reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));

  len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
  for (i = max_reg - 1; i >= FIRST_PSEUDO_REGISTER; i--)
    reg_map[i] = (rtx) obstack_copy (&maybepermanent_obstack, regno_reg_rtx[i], len);
  bcopy (reg_map + FIRST_PSEUDO_REGISTER,
         regno_reg_rtx + FIRST_PSEUDO_REGISTER,
         (max_reg - FIRST_PSEUDO_REGISTER) * sizeof (rtx));

  /* Likewise each label rtx must have a unique rtx as its copy.  */

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* Record the mapping of old insns to copied insns.  */

  insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
  bzero (insn_map, max_uid * sizeof (rtx));

  /* Now copy the chain of insns.  */

  for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
    {
      orig_asm_operands_vector = 0;
      copy_asm_operands_vector = 0;

      switch (GET_CODE (insn))
        {
        case NOTE:
          /* No need to keep these.  */
          if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
            continue;

          copy = rtx_alloc (NOTE);
          NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
          NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
          break;

        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          copy = rtx_alloc (GET_CODE (insn));
          PATTERN (copy) = copy_for_inline (PATTERN (insn));
          INSN_CODE (copy) = -1;
          LOG_LINKS (copy) = NULL;
          RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
          break;

        case CODE_LABEL:
          copy = label_map[CODE_LABEL_NUMBER (insn)];
          break;

        case BARRIER:
          copy = rtx_alloc (BARRIER);
          break;

        default:
          abort ();
        }
      INSN_UID (copy) = INSN_UID (insn);
      insn_map[INSN_UID (insn)] = copy;
      NEXT_INSN (last_insn) = copy;
      PREV_INSN (copy) = last_insn;
      last_insn = copy;
    }

  /* Now copy the reg notes of the insns.
     Do this now because there can be forward references.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
        || GET_CODE (insn) == CALL_INSN)
      {
        rtx copy = insn_map[INSN_UID (insn)];
        REG_NOTES (copy) = copy_for_inline (REG_NOTES (insn));
      }

  NEXT_INSN (last_insn) = NULL;

  NEXT_INSN (head) = get_first_nonparm_insn ();
  FIRST_PARM_INSN (head) = get_insns ();
  DECL_SAVED_INSNS (fndecl) = head;
  DECL_FRAME_SIZE (fndecl) = get_frame_size ();
  TREE_INLINE (fndecl) = 1;

  parmdecl_map = 0;
  label_map = 0;
  reg_map = 0;
  return_label = 0;

  set_new_first_and_last_insn (first_insn, last_insn);
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.
   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          x = rtx_alloc (ASM_OPERANDS);
          XSTR (x, 0) = XSTR (orig, 0);
          XSTR (x, 1) = XSTR (orig, 1);
          XINT (x, 2) = XINT (orig, 2);
          XVEC (x, 3) = copy_asm_operands_vector;
          XVEC (x, 4) = copy_asm_constraints_vector;
          XSTR (x, 5) = XSTR (orig, 5);
          XINT (x, 6) = XINT (orig, 6);
          return x;
        }
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant
         or is a constant plus one of the special registers.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
        return x;
#if 0 /* This is turned off because it is possible for
         unshare_all_rtl to copy the address into memory that won't be saved.
         Although the MEM can safely be shared, and won't be copied there,
         the address itself cannot be shared, and may need to be copied.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
          && (REGNO (XEXP (XEXP (x, 0), 0)) == FRAME_POINTER_REGNUM
              || REGNO (XEXP (XEXP (x, 0), 0)) == ARG_POINTER_REGNUM)
          && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
#if 0
        /* This statement was accidentally deleted in the remote past.
           Reinsert it for 1.37.  Don't take the risk now.  */
        return x;
#endif
      if (GET_CODE (XEXP (x, 0)) == REG
          && (REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
              || REGNO (XEXP (x, 0)) == ARG_POINTER_REGNUM)
          && CONSTANT_ADDRESS_P (XEXP (x, 1)))
        return x;
#endif /* 0 */
      break;

    case LABEL_REF:
      {
        /* Must point to the new insn.  */
        return gen_rtx (LABEL_REF, GET_MODE (orig),
                        label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      }

    case REG:
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
        return reg_map[REGNO (x)];
      else
        return x;

    /* If a parm that gets modified lives in a pseudo-reg,
       clear its TREE_READONLY to prevent certain optimizations.  */
    case SET:
      {
        rtx dest = SET_DEST (x);

        if (GET_CODE (dest) == REG
            && REGNO (dest) < max_parm_reg
            && REGNO (dest) >= FIRST_PSEUDO_REGISTER
            && parmdecl_map[REGNO (dest)] != 0)
          TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
                   + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          XEXP (x, i) = copy_for_inline (XEXP (x, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              register int j;

              XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
              for (j = 0; j < XVECLEN (x, i); j++)
                XVECEXP (x, i, j)
                  = copy_for_inline (XVECEXP (x, i, j));
            }
          break;
        }
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual;
  rtx header = DECL_SAVED_INSNS (fndecl);
  rtx insns = FIRST_FUNCTION_INSN (header);
  rtx parm_insns = FIRST_PARM_INSN (header);
  rtx insn;
  int max_regno = MAX_REGNUM (header) + 1;
  register int i;
  int min_labelno = FIRST_LABELNO (header);
  int max_labelno = LAST_LABELNO (header);
  int nargs;
  rtx *arg_vec;
  rtx local_return_label = 0;
  rtx follows_call = 0;
  rtx this_struct_value_rtx = 0;
  /* List of tree_list nodes with parm as purpose and its index as value.  */
  tree must_load_parms = 0;

  if (max_regno < FIRST_PSEUDO_REGISTER)
    abort ();

  nargs = list_length (DECL_ARGUMENTS (fndecl));

  /* We expect PARMS to have the right length; don't crash if not.  */
  if (list_length (parms) != nargs)
    return (rtx) -1;
  /* Also check that the parms' types match.  Since the appropriate
     conversions or default promotions have already been applied,
     the machine modes should match exactly.  */
  for (formal = DECL_ARGUMENTS (fndecl),
       actual = parms;
       formal;
       formal = TREE_CHAIN (formal),
       actual = TREE_CHAIN (actual))
    {
      tree arg = TREE_VALUE (actual);
      enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal));
      if (mode != TYPE_MODE (TREE_TYPE (arg)))
        return (rtx) -1;
      /* If they are block mode, the types should match exactly.  */
      if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal))
        return (rtx) -1;
    }

  /* Make a binding contour to keep inline cleanups called at
     outer function-scope level from looking like they are shadowing
     parameter declarations.  */
  pushlevel (0);

  /* Make a fresh binding contour that we can easily remove.  */
  pushlevel (0);
  expand_start_bindings (0);
  if (GET_CODE (parm_insns) == NOTE
      && NOTE_LINE_NUMBER (parm_insns) < 0)
    emit_note (NOTE_SOURCE_FILE (parm_insns), NOTE_LINE_NUMBER (parm_insns));

  /* Get all the actual args as RTL, and store them in ARG_VEC.  */

  arg_vec = (rtx *) alloca (nargs * sizeof (rtx));

  for (formal = DECL_ARGUMENTS (fndecl),
       actual = parms,
       i = 0;
       formal;
       formal = TREE_CHAIN (formal),
       actual = TREE_CHAIN (actual),
       i++)
    {
      /* Actual parameter, already converted to DECL_ARG_TYPE (formal).  */
      tree arg = TREE_VALUE (actual);
      /* Mode of the value supplied.  */
      enum machine_mode tmode = TYPE_MODE (DECL_ARG_TYPE (formal));
      /* Mode of the variable used within the function.  */
      enum machine_mode imode = TYPE_MODE (TREE_TYPE (formal));
      rtx copy;

      emit_note (DECL_SOURCE_FILE (formal), DECL_SOURCE_LINE (formal));

      /* Make a place to hold the argument value, still in mode TMODE,
         and put it in COPY.  */
      if (TREE_ADDRESSABLE (formal))
        {
          int size = int_size_in_bytes (DECL_ARG_TYPE (formal));
          copy = assign_stack_local (tmode, size);
          if (!memory_address_p (DECL_MODE (formal), XEXP (copy, 0)))
            copy = change_address (copy, VOIDmode, copy_rtx (XEXP (copy, 0)));
          store_expr (arg, copy, 0);
        }
      else if (! TREE_READONLY (formal)
               || TREE_VOLATILE (formal))
        {
          /* If parm is modified or if it hasn't a pseudo reg,
             we may not simply substitute the actual value;
             copy it through a register.  */
          copy = gen_reg_rtx (tmode);
          store_expr (arg, copy, 0);
        }
      else
        {
          copy = expand_expr (arg, 0, tmode, 0);

          /* We do not use CONSTANT_ADDRESS_P here because
             the set of cases where that might make a difference
             are a subset of the cases that arise even when
             it is a CONSTANT_ADDRESS_P (i.e., fp_delta
             gets into the act).  */
          if (GET_CODE (copy) != REG && ! CONSTANT_P (copy))
            copy = copy_to_reg (copy);
        }
      /* If passed mode != nominal mode, COPY is now the passed mode.
         Convert it to the nominal mode (i.e. truncate it).  */
      if (tmode != imode)
        copy = convert_to_mode (imode, copy, 0);
      arg_vec[i] = copy;
    }

  copy_parm_decls (DECL_ARGUMENTS (fndecl), arg_vec);

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* clean up stack so that variables might have smaller offsets.  */
  do_pending_stack_adjust ();

  /* Pass the function the address in which to return a structure value.  */
  if (structure_value_addr)
    {
      if (GET_CODE (structure_value_addr) == REG
          && (struct_value_rtx == 0 || GET_CODE (struct_value_rtx) == MEM))
        this_struct_value_rtx = structure_value_addr;
      else
        this_struct_value_rtx = copy_to_mode_reg (Pmode, structure_value_addr);
    }

  /* Now prepare for copying the insns.
     Set up reg_map, parm_map and label_map saying how to translate
     the pseudo-registers, stack-parm references and labels when copying.  */

  reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_map, max_regno * sizeof (rtx));

  parm_map = (rtx *) alloca ((FUNCTION_ARGS_SIZE (header) + UNITS_PER_WORD - 1)
                             / UNITS_PER_WORD * sizeof (rtx));
  bzero (parm_map, ((FUNCTION_ARGS_SIZE (header) + UNITS_PER_WORD - 1)
                    / UNITS_PER_WORD * sizeof (rtx)));

  /* Note that expand_expr (called above) can clobber first_parm_offset.  */
  first_parm_offset = FIRST_PARM_OFFSET (fndecl);
  parm_map -= first_parm_offset / UNITS_PER_WORD;
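
  /* Illustrative note (a sketch, not in the original source): the
     subtraction biases parm_map so it can be indexed directly by the
     word offset from the arg pointer.  If, say, FIRST_PARM_OFFSET is 8
     and UNITS_PER_WORD is 4, then parm_map[8 / 4], i.e. parm_map[2],
     names the entry for the first stack parm.  */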

  if (DECL_ARGUMENTS (fndecl))
    {
      tree decl = DECL_ARGUMENTS (fndecl);

      for (formal = decl, i = 0; formal; formal = TREE_CHAIN (formal), i++)
        {
          /* Create an entry in PARM_MAP that says what pseudo register
             is associated with an address we might compute.  */
          if (DECL_OFFSET (formal) >= 0)
            {
              /* This parameter has a home in the stack.  */
              parm_map[DECL_OFFSET (formal) / BITS_PER_WORD] = arg_vec[i];
            }
          else
            {
              /* Parameter that was passed in a register;
                 does it have a home on the stack (as a local)?  */
              rtx frtx = DECL_RTL (formal);
              rtx offset = 0;
              if (GET_CODE (frtx) == MEM)
                {
                  frtx = XEXP (frtx, 0);
                  if (GET_CODE (frtx) == PLUS)
                    {
                      if (XEXP (frtx, 0) == frame_pointer_rtx
                          && GET_CODE (XEXP (frtx, 1)) == CONST_INT)
                        offset = XEXP (frtx, 1);
                      else if (XEXP (frtx, 1) == frame_pointer_rtx
                               && GET_CODE (XEXP (frtx, 0)) == CONST_INT)
                        offset = XEXP (frtx, 0);
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
                      /* If there is a separate arg pointer
                         and REG_PARM_STACK_SPACE is defined,
                         parms passed in regs can be copied
                         to slots reached via the arg pointer.  */
                      if (XEXP (frtx, 0) == arg_pointer_rtx
                          && GET_CODE (XEXP (frtx, 1)) == CONST_INT)
                        offset = XEXP (frtx, 1);
                      else if (XEXP (frtx, 1) == arg_pointer_rtx
                               && GET_CODE (XEXP (frtx, 0)) == CONST_INT)
                        offset = XEXP (frtx, 0);
#endif
                    }
                  if (offset && INTVAL (offset) >= first_parm_offset)
                    parm_map[INTVAL (offset) / UNITS_PER_WORD] = arg_vec[i];
                  else if (offset)
                    must_load_parms
                      = tree_cons (formal, build_int_2 (i, 0),
                                   must_load_parms);
                  else if (TREE_TYPE (formal) != error_mark_node)
                    abort ();
                }
              else if (GET_CODE (frtx) != REG)
                abort ();
            }
          /* Create an entry in REG_MAP that says what rtx is associated
             with a pseudo register from the function being inlined.  */
          if (GET_CODE (DECL_RTL (formal)) == REG)
            reg_map[REGNO (DECL_RTL (formal))] = arg_vec[i];
        }
    }

#if 0 /* This was turned off when it was written,
         because expand_call was changed not to need it.  */
  /* Handle the case where our caller offers a register target
     but the called function wants to return the value in memory.  */
  if (this_struct_value_rtx == 0
      && aggregate_value_p (DECL_RESULT (fndecl)))
    {
      enum machine_mode mode1 = GET_MODE (DECL_RTL (DECL_RESULT (fndecl)));
      this_struct_value_rtx
        = assign_stack_local (mode1, GET_MODE_SIZE (mode1));
      target = 0;
    }
#endif

  /* Make certain that we can accept struct_value_{incoming_rtx,rtx},
     and map it.  */
  if (this_struct_value_rtx == 0)
    ;
  else if (GET_CODE (struct_value_incoming_rtx) == REG)
    reg_map[REGNO (XEXP (DECL_RTL (DECL_RESULT (fndecl)), 0))]
      = this_struct_value_rtx;
  else if (GET_CODE (struct_value_incoming_rtx) == MEM
           && XEXP (XEXP (struct_value_incoming_rtx, 0), 0) == frame_pointer_rtx
           && GET_CODE (XEXP (XEXP (struct_value_incoming_rtx, 0), 1)) == CONST_INT)
    reg_map[REGNO (XEXP (DECL_RTL (DECL_RESULT (fndecl)), 0))]
      = this_struct_value_rtx;
#if 0
    parm_map[INTVAL (XEXP (XEXP (struct_value_incoming_rtx, 0), 1)) / UNITS_PER_WORD]
      = this_struct_value_rtx;
#endif
  else
    abort ();

  label_map = (rtx *) alloca ((max_labelno - min_labelno) * sizeof (rtx));
  label_map -= min_labelno;

  for (i = min_labelno; i < max_labelno; i++)
    label_map[i] = gen_label_rtx ();

  /* As we copy insns, record the correspondence, so that inter-insn
     references can be copied into isomorphic structure.  */

  insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
  bzero (insn_map, INSN_UID (header) * sizeof (rtx));

  /* Set up a target to translate the inline function's value-register.  */

  if (this_struct_value_rtx != 0 || TYPE_MODE (type) == VOIDmode)
    inline_target = 0;
  else
    {
      /* Machine mode function was declared to return.  */
      enum machine_mode departing_mode = TYPE_MODE (type);
      /* (Possibly wider) machine mode it actually computes
         (for the sake of callers that fail to declare it right).  */
      enum machine_mode arriving_mode
        = TYPE_MODE (DECL_RESULT_TYPE (fndecl));

      /* Don't use MEMs as direct targets because on some machines
         substituting a MEM for a REG makes invalid insns.
         Let the combiner substitute the MEM if that is valid.  */
      if (target && GET_CODE (target) == REG
          && GET_MODE (target) == departing_mode)
        inline_target = target;
      else
        inline_target = target = gen_reg_rtx (departing_mode);

      /* If function's value was promoted before return,
         avoid machine mode mismatch when we substitute INLINE_TARGET.
         But TARGET is what we will return to the caller.  */
      if (arriving_mode != departing_mode)
        inline_target = gen_rtx (SUBREG, arriving_mode, target, 0);
    }

  /* Make space in current function's stack frame
     for the stack frame of the inline function.
     Adjust all frame-pointer references by the difference
     between the offset to this space
     and the offset to the equivalent space in the inline
     function's frame.
     This difference equals the size of preexisting locals.  */

  fp_delta = get_frame_size ();
#ifdef FRAME_GROWS_DOWNWARD
  fp_delta = - fp_delta;
#endif
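
  /* A hedged worked example (not in the original source): if the caller
     already has 32 bytes of locals, get_frame_size () is 32; on a machine
     whose frame grows downward fp_delta becomes -32, so a slot the inline
     function knew as (plus fp -8) is rewritten below as (plus fp -40).  */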

  inline_fp_rtx
    = copy_to_mode_reg (Pmode,
                        plus_constant (frame_pointer_rtx, fp_delta));

  /* Now allocate the space for that to point at.  */

  assign_stack_local (VOIDmode, DECL_FRAME_SIZE (fndecl));

  /* Load any parms represented as locals with the supplied values.
     We couldn't do this above where the other parms' values are handled
     because we need fp_delta to do it right.  */
  while (must_load_parms)
    {
      rtx dest = DECL_RTL (TREE_PURPOSE (must_load_parms));
      int parm_num = TREE_INT_CST_LOW (TREE_VALUE (must_load_parms));
      emit_insn (gen_move_insn (copy_rtx_and_substitute (dest),
                                arg_vec[parm_num]));
      must_load_parms = TREE_CHAIN (must_load_parms);
    }

  /* Now copy the insns one by one.  */

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx copy, pattern, next = 0;

      orig_asm_operands_vector = 0;
      copy_asm_operands_vector = 0;

      switch (GET_CODE (insn))
        {
        case INSN:
          pattern = PATTERN (insn);

          /* Special handling for the insn immediately after a CALL_INSN
             that returned a value:
             If it does copy the value, we must avoid the usual translation
             of the return-register into INLINE_TARGET.
             If it just USEs the value, the inline function expects it to
             stay in the return-register and be returned,
             so copy it into INLINE_TARGET.  */

          if (follows_call
              /* Allow a stack-adjust, handled normally, to come in between
                 the call and the value-copying insn.  */
              && ! (GET_CODE (pattern) == SET
                    && SET_DEST (pattern) == stack_pointer_rtx))
            {
              if (GET_CODE (pattern) == SET
                  && rtx_equal_p (SET_SRC (pattern), follows_call))
                /* This insn copies the value: take special care to copy
                   that value to this insn's destination.  */
                {
                  copy = emit_insn (gen_rtx (SET, VOIDmode,
                                             copy_rtx_and_substitute (SET_DEST (pattern)),
                                             follows_call));
                  RTX_INTEGRATED_P (copy) = 1;
                  follows_call = 0;
                  break;
                }
              else if (GET_CODE (pattern) == USE
                       && rtx_equal_p (XEXP (pattern, 0), follows_call))
                /* This insn does nothing but says the value is expected
                   to flow through to the inline function's return-value.
                   Make that happen, then ignore this insn.  */
                {
                  copy = emit_insn (gen_rtx (SET, VOIDmode, inline_target,
                                             follows_call));
                  RTX_INTEGRATED_P (copy) = 1;
                  follows_call = 0;
                  break;
                }
              /* If it does neither, this value must be ignored.  */
              follows_call = 0;
            }

          /* The (USE (REG n)) at return from the function should be ignored
             since we are changing (REG n) into inline_target.  */
          copy = 0;
          if (GET_CODE (pattern) == USE
              && GET_CODE (XEXP (pattern, 0)) == REG
              && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
            break;
          /* Ignore setting a function value that we don't want to use.  */
          if (inline_target == 0
              && GET_CODE (pattern) == SET
              && GET_CODE (SET_DEST (pattern)) == REG
              && REG_FUNCTION_VALUE_P (SET_DEST (pattern)))
            break;
          /* Try to do some quick constant folding here.
             This will save execution time of the compiler,
             as well as time and space of the program, if done here.  */
          if (GET_CODE (pattern) == SET
              && SET_DEST (pattern) == cc0_rtx)
            next = try_fold_cc0 (insn);

          if (next != 0)
            {
              insn = next;
            }
          else
            {
              rtx note = find_reg_note (insn, REG_EQUIV, 0);

              copy = emit_insn (copy_rtx_and_substitute (pattern));
              RTX_INTEGRATED_P (copy) = 1;

              /* If we are copying an insn that loads a constant,
                 record the constantness.  */
              if (note)
                REG_NOTES (copy)
                  = gen_rtx (EXPR_LIST, REG_EQUIV, XEXP (note, 0),
                             REG_NOTES (copy));
            }
          break;

        case JUMP_INSN:
          follows_call = 0;
          if (GET_CODE (PATTERN (insn)) == RETURN)
            {
              if (local_return_label == 0)
                local_return_label = gen_label_rtx ();
              emit_jump (local_return_label);
              break;
            }
          copy = emit_jump_insn (copy_rtx_and_substitute (PATTERN (insn)));
          RTX_INTEGRATED_P (copy) = 1;
          break;

        case CALL_INSN:
#if 0
          /* This should no longer be necessary now that references
             to this function's return value are flagged to distinguish
             them from other references to the same hard register.  */
          {
            rtx newbod;
            /* If the call's body is (set (reg...) (call...)),
               the register is a function return register, but DON'T
               translate it into INLINE_TARGET because it describes the
               called function, not the caller's return value.  */
            if (GET_CODE (PATTERN (insn)) == SET)
              newbod = gen_rtx (SET, VOIDmode, SET_DEST (PATTERN (insn)),
                                copy_rtx_and_substitute (SET_SRC (PATTERN (insn))));
            else if (GET_CODE (PATTERN (insn)) == PARALLEL
                     && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
              {
                register int j;
                rtx newelem;
                newbod = gen_rtx (PARALLEL, VOIDmode,
                                  rtvec_alloc (XVECLEN (PATTERN (insn), 0)));
                newelem = gen_rtx (SET, VOIDmode,
                                   SET_DEST (XVECEXP (PATTERN (insn), 0, 0)),
                                   copy_rtx_and_substitute (SET_SRC (XVECEXP (PATTERN (insn), 0, 0))));
                XVECEXP (newbod, 0, 0) = newelem;
                for (j = 1; j < XVECLEN (newbod, 0); j++)
                  XVECEXP (newbod, 0, j)
                    = copy_rtx_and_substitute (XVECEXP (PATTERN (insn), 0, j));
              }
            else
              newbod = copy_rtx_and_substitute (PATTERN (insn));
            copy = emit_call_insn (newbod);
          }
#else /* 1 */
          copy = emit_call_insn (copy_rtx_and_substitute (PATTERN (insn)));
#endif /* 1 */
          RTX_INTEGRATED_P (copy) = 1;
          /* Special handling needed for the following INSN depending on
             whether it copies the value from the fcn return reg.  */
          if (GET_CODE (PATTERN (insn)) == SET)
            follows_call = SET_DEST (PATTERN (insn));
          else if (GET_CODE (PATTERN (insn)) == PARALLEL
                   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
            follows_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
          break;

        case CODE_LABEL:
          copy = emit_label (label_map[CODE_LABEL_NUMBER (insn)]);
          follows_call = 0;
          break;

        case BARRIER:
          copy = emit_barrier ();
          break;

        case NOTE:
          if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
              && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG)
            copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
          else
            copy = 0;
          break;

        default:
          abort ();
          break;
        }

      insn_map[INSN_UID (insn)] = copy;
    }

  if (local_return_label)
    emit_label (local_return_label);

  /* Make copies of the decls of the symbols in the inline function, so that
     the copies of the variables get declared in the current function.  */
  copy_decl_tree (DECL_INITIAL (fndecl), 0);

  /* End the scope containing the copied formal parameter variables.  */

  expand_end_bindings (getdecls (), 1, 1);
  poplevel (1, 1, 0);
  poplevel (0, 0, 0);

  emit_line_note (input_filename, lineno);
  reg_map = NULL;
  label_map = NULL;

  if (ignore || TYPE_MODE (type) == VOIDmode)
    return 0;

  if (structure_value_addr)
    {
      if (target)
        return target;
      return gen_rtx (MEM, TYPE_MODE (type),
                      memory_address (BLKmode, structure_value_addr));
    }

  return target;
}

/* Given a chain of PARM_DECLs, ARGS, and a vector of RTL homes VEC,
   copy each decl into a VAR_DECL, push all of those decls
   and give each one the corresponding home.  */

static void
copy_parm_decls (args, vec)
     tree args;
     rtx *vec;
{
  register tree tail;
  register int i;

  for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
    {
      register tree decl = pushdecl (build_decl (VAR_DECL, DECL_NAME (tail),
                                                 TREE_TYPE (tail)));
      /* These args would always appear unused, if not for this.  */
      TREE_USED (decl) = 1;
      /* Prevent warning for shadowing with these.  */
      TREE_INLINE (decl) = 1;
      DECL_RTL (decl) = vec[i];
    }
}

/* Given a LET_STMT node, push decls and levels
   so as to construct in the current function a tree of contexts
   isomorphic to the one that is given.  */

static void
copy_decl_tree (let, level)
     tree let;
     int level;
{
  tree t, node;

  pushlevel (0);

  for (t = STMT_VARS (let); t; t = TREE_CHAIN (t))
    {
      tree d = build_decl (TREE_CODE (t), DECL_NAME (t), TREE_TYPE (t));
      DECL_SOURCE_LINE (d) = DECL_SOURCE_LINE (t);
      DECL_SOURCE_FILE (d) = DECL_SOURCE_FILE (t);
      if (DECL_RTL (t) != 0)
        {
          if (GET_CODE (DECL_RTL (t)) == MEM
              && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (t), 0)))
            /* copy_rtx_and_substitute would call memory_address
               which would copy the address into a register.
               Then debugging-output wouldn't know how to handle it.  */
            DECL_RTL (d) = DECL_RTL (t);
          else
            DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t));
        }
      TREE_EXTERNAL (d) = TREE_EXTERNAL (t);
      TREE_STATIC (d) = TREE_STATIC (t);
      TREE_PUBLIC (d) = TREE_PUBLIC (t);
      TREE_LITERAL (d) = TREE_LITERAL (t);
      TREE_ADDRESSABLE (d) = TREE_ADDRESSABLE (t);
      TREE_READONLY (d) = TREE_READONLY (t);
      TREE_VOLATILE (d) = TREE_VOLATILE (t);
      /* These args would always appear unused, if not for this.  */
      TREE_USED (d) = 1;
      /* Prevent warning for shadowing with these.  */
      TREE_INLINE (d) = 1;
      pushdecl (d);
    }

  for (t = STMT_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
    copy_decl_tree (t, level + 1);

  node = poplevel (level > 0, 0, 0);
  if (node)
    TREE_USED (node) = TREE_USED (let);
}

/* Create a new copy of an rtx.
   Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.  */

static rtx
copy_rtx_and_substitute (orig)
     register rtx orig;
{
  register rtx copy, temp;
  register int i, j;
  register RTX_CODE code;
  register enum machine_mode mode;
  register char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If a frame-pointer register shows up, then we
         must `fix' the reference.  If the stack pointer
         register shows up, it must be part of stack-adjustments
         (*not* because we eliminated the frame pointer!).
         Small hard registers are returned as-is.  Pseudo-registers
         go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          /* Some hard registers are also mapped,
             but others are not translated.  */
          if (reg_map[regno] != 0)
            return reg_map[regno];
          if (REG_FUNCTION_VALUE_P (orig))
            {
              /* This is a reference to the function return value.  If
                 the function doesn't have a return value, error.
                 If it does, it may not be the same mode as `inline_target'
                 because SUBREG is not required for hard regs.
                 If not, adjust mode of inline_target to fit the context.  */
              if (inline_target == 0)
                abort ();
              if (mode == GET_MODE (inline_target))
                return inline_target;
              return gen_rtx (SUBREG, mode, inline_target, 0);
            }
          if (regno == FRAME_POINTER_REGNUM)
            return plus_constant (orig, fp_delta);
          return orig;
        }
      if (reg_map[regno] == NULL)
        reg_map[regno] = gen_reg_rtx (mode);
      return reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig));
      /* SUBREG is ordinary, but don't make nested SUBREGs.  */
      if (GET_CODE (copy) == SUBREG)
        return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
                        SUBREG_WORD (orig) + SUBREG_WORD (copy));
      return gen_rtx (SUBREG, GET_MODE (orig), copy,
                      SUBREG_WORD (orig));

    case CODE_LABEL:
      return label_map[CODE_LABEL_NUMBER (orig)];

    case LABEL_REF:
      copy = rtx_alloc (LABEL_REF);
      PUT_MODE (copy, mode);
      XEXP (copy, 0) = label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))];
      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
      return orig;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          copy = rtx_alloc (ASM_OPERANDS);
          XSTR (copy, 0) = XSTR (orig, 0);
          XSTR (copy, 1) = XSTR (orig, 1);
          XINT (copy, 2) = XINT (orig, 2);
          XVEC (copy, 3) = copy_asm_operands_vector;
          XVEC (copy, 4) = copy_asm_constraints_vector;
          XSTR (copy, 5) = XSTR (orig, 5);
          XINT (copy, 6) = XINT (orig, 6);
          return copy;
        }
      break;

    case CALL:
      /* This is given special treatment because the first
         operand of a CALL is a (MEM ...) which may get
         forced into a register for cse.  This is undesirable
         if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
        return gen_rtx (CALL, GET_MODE (orig),
                        gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
                                 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0))),
                        copy_rtx_and_substitute (XEXP (orig, 1)));
      break;

    case PLUS:
      /* Note: the PLUS case is not nearly as careful as the MEM
         case in terms of preserving addresses.  The reason for this
         is that it is expected that if a PLUS_EXPR turns out not
         to be a legitimate address, reload can fix that up, without
         doing major damage.  However, a MEM rtx must preside
         over a legitimate address.  The MEM case has lots of hair
         to deal with what happens when it sits on a PLUS...  */
      /* Take care of the easy case quickly.  */
      if (XEXP (orig, 0) == frame_pointer_rtx
          || XEXP (orig, 1) == frame_pointer_rtx
          || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
              && (XEXP (orig, 0) == arg_pointer_rtx
                  || XEXP (orig, 1) == arg_pointer_rtx)))
        {
          rtx reg;
          if (XEXP (orig, 0) == frame_pointer_rtx
              || XEXP (orig, 0) == arg_pointer_rtx)
            reg = XEXP (orig, 0), copy = XEXP (orig, 1);
          else
            reg = XEXP (orig, 1), copy = XEXP (orig, 0);

          if (GET_CODE (copy) == CONST_INT)
            {
              int c = INTVAL (copy);

              if (reg == arg_pointer_rtx && c >= first_parm_offset)
                {
                  copy = access_parm_map (c, VOIDmode);
                  if (GET_CODE (copy) != MEM)
                    /* Should not happen, because a parm we need to address
                       should not be living in a register.
                       (expand_inline_function copied it to a stack slot.)  */
                    abort ();
                  return XEXP (copy, 0);
                }
              return gen_rtx (PLUS, mode,
                              frame_pointer_rtx,
                              gen_rtx (CONST_INT, SImode,
                                       c + fp_delta));
            }
          copy = copy_rtx_and_substitute (copy);
          temp = force_reg (mode, gen_rtx (PLUS, mode, frame_pointer_rtx, copy));
          return plus_constant (temp, fp_delta);
        }
      else if (reg_mentioned_p (frame_pointer_rtx, orig)
               || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                   && reg_mentioned_p (arg_pointer_rtx, orig)))
        {
          /* If we have a complex sum which has a frame pointer
             in it, and it was a legitimate address, then
             keep it that way.  */
          if (memory_address_p (mode, orig))
            {
              if (GET_CODE (XEXP (orig, 0)) == CONST_INT)
                {
                  copy = copy_rtx_and_substitute (XEXP (orig, 1));
                  temp = plus_constant (copy, INTVAL (XEXP (orig, 0)));
                }
              else if (GET_CODE (XEXP (orig, 1)) == CONST_INT)
                {
                  copy = copy_rtx_and_substitute (XEXP (orig, 0));
                  temp = plus_constant (copy, INTVAL (XEXP (orig, 1)));
                }
              else
                {
                  temp = gen_rtx (PLUS, GET_MODE (orig),
                                  copy_rtx_and_substitute (XEXP (orig, 0)),
                                  copy_rtx_and_substitute (XEXP (orig, 1)));
                }
              temp = memory_address (mode, temp);
            }
          else
            temp = gen_rtx (PLUS, GET_MODE (orig),
                            copy_rtx_and_substitute (XEXP (orig, 0)),
                            copy_rtx_and_substitute (XEXP (orig, 1)));
        }
      else
        temp = gen_rtx (PLUS, GET_MODE (orig),
                        copy_rtx_and_substitute (XEXP (orig, 0)),
                        copy_rtx_and_substitute (XEXP (orig, 1)));

      return temp;

    case MEM:
      /* Take care of easiest case here.  */
      copy = XEXP (orig, 0);
      if (copy == frame_pointer_rtx || copy == arg_pointer_rtx)
        return gen_rtx (MEM, mode,
                        plus_constant (frame_pointer_rtx, fp_delta));

      /* Allow a pushing-address even if that is not valid as an
         ordinary memory address.  It indicates we are inlining a special
         push-insn.  These must be copied; otherwise unshare_all_rtl
         might clobber them to point at temporary rtl of this function.  */
#ifdef STACK_GROWS_DOWNWARD
      if (GET_CODE (copy) == PRE_DEC && XEXP (copy, 0) == stack_pointer_rtx)
        return gen_rtx (MEM, mode, copy_rtx_and_substitute (copy));
#else
      if (GET_CODE (copy) == PRE_INC && XEXP (copy, 0) == stack_pointer_rtx)
        return gen_rtx (MEM, mode, copy_rtx_and_substitute (copy));
#endif

      /* If this is some other sort of address that isn't generally valid,
         break out all the registers referred to.  */
      if (! memory_address_p (mode, copy))
        return gen_rtx (MEM, mode, copy_address (copy));

      if (GET_CODE (copy) == PLUS)
        {
          if (XEXP (copy, 0) == frame_pointer_rtx
              || XEXP (copy, 1) == frame_pointer_rtx
              || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  && (XEXP (copy, 0) == arg_pointer_rtx
                      || XEXP (copy, 1) == arg_pointer_rtx)))
            {
              rtx reg;
              if (XEXP (copy, 0) == frame_pointer_rtx
                  || XEXP (copy, 0) == arg_pointer_rtx)
                reg = XEXP (copy, 0), copy = XEXP (copy, 1);
              else
                reg = XEXP (copy, 1), copy = XEXP (copy, 0);

              if (GET_CODE (copy) == CONST_INT)
                {
                  int c = INTVAL (copy);

                  if (reg == arg_pointer_rtx && c >= first_parm_offset)
                    return access_parm_map (c, mode);

                  temp = gen_rtx (PLUS, Pmode,
                                  frame_pointer_rtx,
                                  gen_rtx (CONST_INT, SImode,
                                           c + fp_delta));
                  if (! memory_address_p (Pmode, temp))
                    return gen_rtx (MEM, mode, plus_constant (inline_fp_rtx, c));
                }
              copy = copy_rtx_and_substitute (copy);
              temp = gen_rtx (PLUS, Pmode, frame_pointer_rtx, copy);
              temp = plus_constant (temp, fp_delta);
              temp = memory_address (Pmode, temp);
            }
          else if (reg_mentioned_p (frame_pointer_rtx, copy)
                   || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                       && reg_mentioned_p (arg_pointer_rtx, copy)))
            {
              if (GET_CODE (XEXP (copy, 0)) == CONST_INT)
                {
                  temp = copy_rtx_and_substitute (XEXP (copy, 1));
                  temp = plus_constant (temp, INTVAL (XEXP (copy, 0)));
                }
              else if (GET_CODE (XEXP (copy, 1)) == CONST_INT)
                {
                  temp = copy_rtx_and_substitute (XEXP (copy, 0));
                  temp = plus_constant (temp, INTVAL (XEXP (copy, 1)));
                }
              else
                {
                  temp = gen_rtx (PLUS, GET_MODE (copy),
                                  copy_rtx_and_substitute (XEXP (copy, 0)),
                                  copy_rtx_and_substitute (XEXP (copy, 1)));
                }
            }
          else
            {
              if (GET_CODE (XEXP (copy, 1)) == CONST_INT)
                temp = plus_constant (copy_rtx_and_substitute (XEXP (copy, 0)),
                                      INTVAL (XEXP (copy, 1)));
              else if (GET_CODE (XEXP (copy, 0)) == CONST_INT)
                temp = plus_constant (copy_rtx_and_substitute (XEXP (copy, 1)),
                                      INTVAL (XEXP (copy, 0)));
              else
                {
                  rtx left = copy_rtx_and_substitute (XEXP (copy, 0));
                  rtx right = copy_rtx_and_substitute (XEXP (copy, 1));

                  temp = gen_rtx (PLUS, GET_MODE (copy), left, right);
                }
            }
        }
      else
        temp = copy_rtx_and_substitute (copy);

      return change_address (orig, mode, temp);

    case RETURN:
      abort ();
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  copy->in_struct = orig->in_struct;
  copy->volatil = orig->volatil;
  copy->unchanging = orig->unchanging;

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j) = copy_rtx_and_substitute (XVECEXP (orig, i, j));
            }
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        default:
          abort ();
        }
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (copy, 3);
      copy_asm_constraints_vector = XVEC (copy, 4);
    }

  return copy;
}

/* Get the value corresponding to an address relative to the arg pointer
   at index RELADDRESS.  MODE is the machine mode of the reference.
   MODE is used only when the value is a REG.
   Pass VOIDmode for MODE when the mode is not known;
   in such cases, you should make sure the value is a MEM.  */

static rtx
access_parm_map (reladdress, mode)
     int reladdress;
     enum machine_mode mode;
{
  /* Index in parm_map.  */
  int index = reladdress / UNITS_PER_WORD;
  /* Offset of the data being referenced
     from the beginning of the value for that parm.  */
  int offset = reladdress % UNITS_PER_WORD;
  rtx copy;
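
  /* Worked example (an illustrative sketch, not in the original source):
     with UNITS_PER_WORD == 4, a reference at reladdress 10 gives
     index = 10 / 4 = 2 and offset = 10 % 4 = 2, i.e. byte 2 within
     the third word of the stack arguments.  */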

  /* If we are referring to the middle of a multiword parm,
     find the beginning of that parm.
     OFFSET gets the offset of the reference from
     the beginning of the parm.  */

  while (parm_map[index] == 0)
    {
      index--;
      if (index < first_parm_offset / UNITS_PER_WORD)
        /* If this abort happens, it means we need
           to handle "decrementing" INDEX back far
           enough to start looking among the reg parms
           instead of the stack parms.  What a mess!  */
        abort ();
      offset += UNITS_PER_WORD;
    }

  copy = parm_map[index];

#ifdef BYTES_BIG_ENDIAN
  /* Subtract from OFFSET the offset of where
     the actual parm value would start.  */
  if (GET_MODE_SIZE (GET_MODE (copy)) < UNITS_PER_WORD)
    offset
      -= (UNITS_PER_WORD
          - GET_MODE_SIZE (GET_MODE (copy)));
#endif

  /* For memory ref, adjust it by the desired offset.  */
  if (GET_CODE (copy) == MEM)
    {
      if (offset != 0)
        return change_address (copy, mode,
                               plus_constant (XEXP (copy, 0),
                                              offset));
      return copy;
    }

  if (GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG
      && ! CONSTANT_P (copy))
    abort ();
  if (mode == VOIDmode)
    abort ();

  /* A REG cannot be offset by bytes, so use a subreg
     (which is possible only in certain cases).  */
  if (GET_MODE (copy) != mode
      && GET_MODE (copy) != VOIDmode)
    {
      int word;
      /* Crash if the portion of the arg wanted
         is not the least significant.
         Functions with refs to other parts of a
         parameter should not be inline--
         see function_cannot_inline_p.  */
#ifdef BYTES_BIG_ENDIAN
      if ((offset + GET_MODE_SIZE (mode)) % UNITS_PER_WORD
          != GET_MODE_SIZE (GET_MODE (copy)) % UNITS_PER_WORD)
        abort ();
#else
      if ((offset % UNITS_PER_WORD) != 0)
        abort ();
#endif
      word = offset % UNITS_PER_WORD;
      if (GET_CODE (copy) == SUBREG)
        word = SUBREG_WORD (copy), copy = SUBREG_REG (copy);
      if (CONSTANT_P (copy))
        copy = force_reg (GET_MODE (copy), copy);
      return gen_rtx (SUBREG, mode, copy, word);
    }

  return copy;
}

/* Like copy_rtx_and_substitute but produces different output, suitable
   for an idiosyncratic address that isn't memory_address_p.
   The output resembles the input except that REGs and MEMs are replaced
   with new pseudo registers.  All the "real work" is done in separate
   insns which set up the values of these new registers.  */

1668static rtx
1669copy_address (orig)
1670 register rtx orig;
1671{
1672 register rtx copy;
1673 register int i, j;
1674 register RTX_CODE code;
1675 register enum machine_mode mode;
1676 register char *format_ptr;
1677
1678 if (orig == 0)
1679 return 0;
1680
1681 code = GET_CODE (orig);
1682 mode = GET_MODE (orig);
1683
1684 switch (code)
1685 {
1686 case REG:
1687 if (REGNO (orig) != FRAME_POINTER_REGNUM)
1688 return copy_rtx_and_substitute (orig);
1689 return plus_constant (frame_pointer_rtx, fp_delta);
1690
1691 case PLUS:
1692 if (GET_CODE (XEXP (orig, 0)) == REG
1693 && REGNO (XEXP (orig, 0)) == FRAME_POINTER_REGNUM)
1694 return plus_constant (orig, fp_delta);
1695 break;
1696
1697 case MEM:
1698 return copy_to_reg (copy_rtx_and_substitute (orig));
1699
1700 case CODE_LABEL:
1701 case LABEL_REF:
1702 return copy_rtx_and_substitute (orig);
1703
1704 case PC:
1705 case CC0:
1706 case CONST_INT:
1707 case CONST_DOUBLE:
1708 case SYMBOL_REF:
1709 return orig;
1710 }
1711
1712 copy = rtx_alloc (code);
1713 PUT_MODE (copy, mode);
1714 copy->in_struct = orig->in_struct;
1715 copy->volatil = orig->volatil;
1716 copy->unchanging = orig->unchanging;
1717
1718 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
1719
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
        {
        case '0':
          break;

        case 'e':
          XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (copy, i) = insn_map[INSN_UID (XEXP (orig, i))];
          break;

        case 'E':
          XVEC (copy, i) = XVEC (orig, i);
          if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
            {
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
              for (j = 0; j < XVECLEN (copy, i); j++)
                XVECEXP (copy, i, j)
                  = copy_rtx_and_substitute (XVECEXP (orig, i, j));
            }
          break;

        case 'i':
          XINT (copy, i) = XINT (orig, i);
          break;

        case 's':
          XSTR (copy, i) = XSTR (orig, i);
          break;

        default:
          abort ();
        }
    }
  return copy;
}
\f
/* Attempt to simplify INSN while copying it from an inline fn,
   assuming it is a SET that sets CC0.

   If we simplify it, we emit the appropriate insns and return
   the last insn that we have handled (since we may handle the insn
   that follows INSN as well as INSN itself).

   Otherwise we do nothing and return zero.  */
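/* For example, if the inline body contains
     (set (cc0) (const_int 0))
     (set (pc) (if_then_else (eq (cc0) (const_int 0))
                             (label_ref L) (pc)))
   the test is known true at integration time, so we emit an
   unconditional jump to the copy of L, or simply fall through to it
   when L is the next label in the copied stream.  */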

static rtx
try_fold_cc0 (insn)
     rtx insn;
{
  rtx cnst = copy_rtx_and_substitute (SET_SRC (PATTERN (insn)));
  rtx pat, copy;

  if (CONSTANT_P (cnst)
      /* @@ Cautious: Don't know how many of these tests we need.  */
      && NEXT_INSN (insn)
      && GET_CODE (pat = PATTERN (NEXT_INSN (insn))) == SET
      && SET_DEST (pat) == pc_rtx
      && GET_CODE (pat = SET_SRC (pat)) == IF_THEN_ELSE
      && GET_RTX_LENGTH (GET_CODE (XEXP (pat, 0))) == 2)
    {
      rtx cnst2;
      rtx cond = XEXP (pat, 0);

      if ((XEXP (cond, 0) == cc0_rtx
           && CONSTANT_P (XEXP (cond, 1))
           && (cnst2 = XEXP (cond, 1)))
          || (XEXP (cond, 1) == cc0_rtx
              && CONSTANT_P (XEXP (cond, 0))
              && (cnst2 = XEXP (cond, 0))))
        {
          copy = fold_out_const_cc0 (cond, XEXP (pat, 1), XEXP (pat, 2),
                                     cnst, cnst2);
          if (copy)
            {
              if (GET_CODE (copy) == LABEL_REF)
                {
                  /* We will branch unconditionally to
                     the label specified by COPY.
                     Eliminate dead code by running down the
                     list of insns until we see a CODE_LABEL.
                     If the CODE_LABEL is the one specified
                     by COPY, we win, and can delete all code
                     up to (but not necessarily including)
                     that label.  Otherwise we only win a little:
                     emit the branch insn, and continue expanding.  */
                  rtx tmp = NEXT_INSN (insn);
                  while (tmp && GET_CODE (tmp) != CODE_LABEL)
                    tmp = NEXT_INSN (tmp);
                  if (! tmp)
                    abort ();
                  if (label_map[CODE_LABEL_NUMBER (tmp)] == XEXP (copy, 0))
                    {
                      /* Big win.  */
                      return PREV_INSN (tmp);
                    }
                  else
                    {
                      /* Small win.  Emit the unconditional branch,
                         followed by a BARRIER, so that jump optimization
                         will know what to do.  */
                      emit_jump (copy);
                      return NEXT_INSN (insn);
                    }
                }
              else if (copy == pc_rtx)
                {
                  /* Do not take the branch, just fall through.
                     Jump optimization should handle the elimination of
                     dead code if appropriate.  */
                  return NEXT_INSN (insn);
                }
              else
                abort ();
            }
        }
    }
  return 0;
}
\f
/* If (COND_RTX CNST1 CNST2) yields a result we can treat
   as being constant, return THEN_RTX if the result is always
   non-zero, and return ELSE_RTX otherwise.  */
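/* A "constant" that is not a CONST_INT (a SYMBOL_REF, for instance)
   is treated below as having an unknown but nonzero value; that is
   enough to decide comparisons against zero.  So, for example,
   (ne (const_int 0) (const_int 1)) folds to THEN_RTX, while
   (eq (symbol_ref X) (const_int 0)) folds to ELSE_RTX.  */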
static rtx
fold_out_const_cc0 (cond_rtx, then_rtx, else_rtx, cnst1, cnst2)
     rtx cond_rtx, then_rtx, else_rtx;
     rtx cnst1, cnst2;
{
  int value1, value2;
  int int1 = GET_CODE (cnst1) == CONST_INT;
  int int2 = GET_CODE (cnst2) == CONST_INT;
  if (int1)
    value1 = INTVAL (cnst1);
  else
    value1 = 1;
  if (int2)
    value2 = INTVAL (cnst2);
  else
    value2 = 1;

  switch (GET_CODE (cond_rtx))
    {
    case NE:
      if (int1 && int2)
        {
          if (value1 != value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0 || value2 == 0)
        return copy_rtx_and_substitute (then_rtx);
      if (int1 == 0 && int2 == 0 && rtx_equal_p (cnst1, cnst2))
        return copy_rtx_and_substitute (else_rtx);
      break;
    case EQ:
      if (int1 && int2)
        {
          if (value1 == value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0 || value2 == 0)
        return copy_rtx_and_substitute (else_rtx);
      if (int1 == 0 && int2 == 0 && rtx_equal_p (cnst1, cnst2))
        return copy_rtx_and_substitute (then_rtx);
      break;
    case GE:
      if (int1 && int2)
        {
          if (value1 >= value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0)
        return copy_rtx_and_substitute (else_rtx);
      if (value2 == 0)
        return copy_rtx_and_substitute (then_rtx);
      break;
    case GT:
      if (int1 && int2)
        {
          if (value1 > value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0)
        return copy_rtx_and_substitute (else_rtx);
      if (value2 == 0)
        return copy_rtx_and_substitute (then_rtx);
      break;
    case LE:
      if (int1 && int2)
        {
          if (value1 <= value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0)
        return copy_rtx_and_substitute (then_rtx);
      if (value2 == 0)
        return copy_rtx_and_substitute (else_rtx);
      break;
    case LT:
      if (int1 && int2)
        {
          if (value1 < value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0)
        return copy_rtx_and_substitute (then_rtx);
      if (value2 == 0)
        return copy_rtx_and_substitute (else_rtx);
      break;
    case GEU:
      if (int1 && int2)
        {
          if ((unsigned) value1 >= (unsigned) value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0)
        return copy_rtx_and_substitute (else_rtx);
      if (value2 == 0)
        return copy_rtx_and_substitute (then_rtx);
      break;
    case GTU:
      if (int1 && int2)
        {
          if ((unsigned) value1 > (unsigned) value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0)
        return copy_rtx_and_substitute (else_rtx);
      if (value2 == 0)
        return copy_rtx_and_substitute (then_rtx);
      break;
    case LEU:
      if (int1 && int2)
        {
          if ((unsigned) value1 <= (unsigned) value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0)
        return copy_rtx_and_substitute (then_rtx);
      if (value2 == 0)
        return copy_rtx_and_substitute (else_rtx);
      break;
    case LTU:
      if (int1 && int2)
        {
          if ((unsigned) value1 < (unsigned) value2)
            return copy_rtx_and_substitute (then_rtx);
          else
            return copy_rtx_and_substitute (else_rtx);
        }
      if (value1 == 0)
        return copy_rtx_and_substitute (then_rtx);
      if (value2 == 0)
        return copy_rtx_and_substitute (else_rtx);
      break;
    }
  /* Could not hack it.  */
  return 0;
}
\f
/* Output the assembly language code for the function FNDECL
   from its DECL_SAVED_INSNS.  Used for inline functions that are output
   at the end of compilation instead of where they appeared in the source.  */
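/* The saved insn chain becomes the current function again: we switch
   to temporary allocation, reinitialize the per-function globals,
   re-create the stack frame at its recorded size, reinstall the saved
   stack slots and register data, and hand the result to
   rest_of_compilation as if the function had just been parsed.  */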

void
output_inline_function (fndecl)
     tree fndecl;
{
  rtx head = DECL_SAVED_INSNS (fndecl);
  rtx last;
  extern rtx stack_slot_list;

  temporary_allocation ();

  current_function_decl = fndecl;

  /* This call is only used to initialize global variables.  */
  init_function_start (fndecl, "lossage", 1);

  /* Set stack frame size.  */
  assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl));

  restore_reg_data (FIRST_PARM_INSN (head));

  stack_slot_list = XEXP (head, 9);

  expand_function_end (DECL_SOURCE_FILE (fndecl), DECL_SOURCE_LINE (fndecl));

  for (last = head; NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);

  /* Compile this function all the way down to assembly code.  */
  rest_of_compilation (fndecl);

  current_function_decl = 0;

  permanent_allocation ();
}