1/* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include <stdio.h>
22#include "config.h"
23#include "rtl.h"
24#include "obstack.h"
25#include "insn-config.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "flags.h"
29#include "expr.h"
30#include "regs.h"
31#include "hard-reg-set.h"
32#include "reload.h"
33#include "recog.h"
34#include "basic-block.h"
35#include "output.h"
36
37/* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
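/* As a rough, illustrative outline (not verbatim code from this file), the
   pass below proceeds approximately as follows:

     reload (first, global, dumpfile)
       record REG_EQUIV equivalences and paradoxical subregs;
       loop until nothing changes:
         for each insn: find_reloads, accumulate per-class reload needs;
         spill enough hard regs (spill_hard_reg) to satisfy those needs;
       then reload_as_needed rescans the insns, using choose_reload_regs
       and emit_reload_insns to rewrite each one.

   The names above are functions defined or declared later in this file;
   the control flow shown is only a sketch.  */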
69
70
71#ifndef REGISTER_MOVE_COST
72#define REGISTER_MOVE_COST(x, y) 2
73#endif
74
75#ifndef MEMORY_MOVE_COST
76#define MEMORY_MOVE_COST(x) 4
77#endif
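/* As a purely hypothetical example (not taken from any machine description),
   a target where moves between different register classes are relatively
   expensive might override these defaults in its tm.h with something like:

     #define REGISTER_MOVE_COST(CLASS1, CLASS2) \
       ((CLASS1) != (CLASS2) ? 4 : 2)
     #define MEMORY_MOVE_COST(MODE) 6

   The costs shown here are illustrative only.  */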
78\f
79/* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81static rtx *reg_last_reload_reg;
82
83/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85static char *reg_has_output_reload;
86
87/* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89static HARD_REG_SET reg_is_output_reload;
90
91/* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95rtx *reg_equiv_constant;
96
97/* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101rtx *reg_equiv_memory_loc;
102
103/* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine.) */
106rtx *reg_equiv_address;
107
108/* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110rtx *reg_equiv_mem;
111
112/* Widest width in which each pseudo reg is referred to (via subreg). */
113static int *reg_max_ref_width;
114
115/* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117static rtx *reg_equiv_init;
118
119/* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126/* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132/* Number of spill-regs so far; number of valid elements of spill_regs. */
133static int n_spills;
134
135/* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141/* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146/* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152/* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155HARD_REG_SET forbidden_regs;
156
157/* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164static HARD_REG_SET bad_spill_regs;
165
166/* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171/* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178/* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183/* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185static HARD_REG_SET counted_for_groups;
186
187/* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191static HARD_REG_SET counted_for_nongroups;
192
193/* Indexed by pseudo reg number N,
 194    says we may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197static char *cannot_omit_stores;
198
199/* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206static char spill_indirect_levels;
207
208/* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212char indirect_symref_ok;
213
214/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216char double_reg_address_ok;
217
218/* Record the stack slot for each spilled hard register. */
219
220static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222/* Width allocated so far for that stack slot. */
223
224static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226/* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231char *basic_block_needs[N_REG_CLASSES];
232
233/* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235int reload_first_uid;
236
237/* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240int caller_save_needed;
241
242/* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245int reload_in_progress = 0;
246
247/* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
254/* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258struct obstack reload_obstack;
259char *reload_firstobj;
260
261#define obstack_chunk_alloc xmalloc
262#define obstack_chunk_free free
263
264/* List of labels that must never be deleted. */
265extern rtx forced_labels;
266\f
267/* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272static struct elim_table
273{
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290} reg_eliminate[] =
291
292/* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
296#ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298#else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300#endif
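/* For illustration only: a typical machine description that can also
   eliminate the argument pointer would provide something along the lines of

     #define ELIMINABLE_REGS					\
       {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM},		\
	{ ARG_POINTER_REGNUM,   FRAME_POINTER_REGNUM},		\
	{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}

   together with matching CAN_ELIMINATE and INITIAL_ELIMINATION_OFFSET
   definitions; the exact pairs are target-specific.  */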
301
302#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
303
304/* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307static int num_not_at_initial_offset;
308
309/* Count the number of registers that we may be able to eliminate. */
310static int num_eliminable;
311
312/* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319static char *offsets_known_at;
320static int (*offsets_at)[NUM_ELIMINABLE_REGS];
321
322/* Number of labels in the current function. */
323
324static int num_labels;
325
326struct hard_reg_n_uses { int regno; int uses; };
327\f
328static int possible_group_p PROTO((int, int *));
329static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334static void spill_failure PROTO((rtx));
335static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337static void delete_dead_insn PROTO((rtx));
338static void alter_reg PROTO((int, int));
339static void set_label_offsets PROTO((rtx, rtx, int));
340static int eliminate_regs_in_insn PROTO((rtx, int));
341static void mark_not_eliminable PROTO((rtx, rtx));
342static int spill_hard_reg PROTO((int, int, FILE *, int));
343static void scan_paradoxical_subregs PROTO((rtx));
344static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
345 struct hard_reg_n_uses *));
346static void order_regs_for_reload PROTO((void));
347static void reload_as_needed PROTO((rtx, int));
348static void forget_old_reloads_1 PROTO((rtx, rtx));
349static int reload_reg_class_lower PROTO((short *, short *));
350static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
351 enum machine_mode));
352static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
353 enum machine_mode));
354static int reload_reg_free_p PROTO((int, int, enum reload_type));
355static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
356static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
357static int allocate_reload_reg PROTO((int, rtx, int, int));
358static void choose_reload_regs PROTO((rtx, rtx));
359static void merge_assigned_reloads PROTO((rtx));
360static void emit_reload_insns PROTO((rtx));
361static void delete_output_reload PROTO((rtx, int, rtx));
362static void inc_for_reload PROTO((rtx, rtx, int));
363static int constraint_accepts_reg_p PROTO((char *, rtx));
364static int count_occurrences PROTO((rtx, rtx));
365\f
366/* Initialize the reload pass once per compilation. */
367
368void
369init_reload ()
370{
371 register int i;
372
373 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
374 Set spill_indirect_levels to the number of levels such addressing is
375 permitted, zero if it is not permitted at all. */
376
377 register rtx tem
378 = gen_rtx (MEM, Pmode,
379 gen_rtx (PLUS, Pmode,
380 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
381 GEN_INT (4)));
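  /* The address built above has the form
     (MEM (PLUS (REG pseudo) (CONST_INT 4))), i.e. the kind of address a
     spilled pseudo's stack slot would have; each additional MEM wrapped
     around it in the loop below tests one more level of indirection.  */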
382 spill_indirect_levels = 0;
383
384 while (memory_address_p (QImode, tem))
385 {
386 spill_indirect_levels++;
387 tem = gen_rtx (MEM, Pmode, tem);
388 }
389
390 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
391
392 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
393 indirect_symref_ok = memory_address_p (QImode, tem);
394
395 /* See if reg+reg is a valid (and offsettable) address. */
396
397 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
398 {
399 tem = gen_rtx (PLUS, Pmode,
400 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
401 gen_rtx (REG, Pmode, i));
402 /* This way, we make sure that reg+reg is an offsettable address. */
403 tem = plus_constant (tem, 4);
404
405 if (memory_address_p (QImode, tem))
406 {
407 double_reg_address_ok = 1;
408 break;
409 }
410 }
411
412 /* Initialize obstack for our rtl allocation. */
413 gcc_obstack_init (&reload_obstack);
414 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
415}
416
417/* Main entry point for the reload pass.
418
419 FIRST is the first insn of the function being compiled.
420
421 GLOBAL nonzero means we were called from global_alloc
422 and should attempt to reallocate any pseudoregs that we
423 displace from hard regs we will use for reloads.
424 If GLOBAL is zero, we do not have enough information to do that,
425 so any pseudo reg that is spilled must go to the stack.
426
427 DUMPFILE is the global-reg debugging dump file stream, or 0.
428 If it is nonzero, messages are written to it to describe
429 which registers are seized as reload regs, which pseudo regs
430 are spilled from them, and where the pseudo regs are reallocated to.
431
432 Return value is nonzero if reload failed
433 and we must not do any more for this function. */
434
435int
436reload (first, global, dumpfile)
437 rtx first;
438 int global;
439 FILE *dumpfile;
440{
441 register int class;
442 register int i, j;
443 register rtx insn;
444 register struct elim_table *ep;
445
446 int something_changed;
447 int something_needs_reloads;
448 int something_needs_elimination;
449 int new_basic_block_needs;
450 enum reg_class caller_save_spill_class = NO_REGS;
451 int caller_save_group_size = 1;
452
453 /* Nonzero means we couldn't get enough spill regs. */
454 int failure = 0;
455
456 /* The basic block number currently being processed for INSN. */
457 int this_block;
458
459 /* Make sure even insns with volatile mem refs are recognizable. */
460 init_recog ();
461
462 /* Enable find_equiv_reg to distinguish insns made by reload. */
463 reload_first_uid = get_max_uid ();
464
465 for (i = 0; i < N_REG_CLASSES; i++)
466 basic_block_needs[i] = 0;
467
468#ifdef SECONDARY_MEMORY_NEEDED
469 /* Initialize the secondary memory table. */
470 clear_secondary_mem ();
471#endif
472
473 /* Remember which hard regs appear explicitly
474 before we merge into `regs_ever_live' the ones in which
475 pseudo regs have been allocated. */
476 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
477
478 /* We don't have a stack slot for any spill reg yet. */
479 bzero (spill_stack_slot, sizeof spill_stack_slot);
480 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
481
482 /* Initialize the save area information for caller-save, in case some
483 are needed. */
484 init_save_areas ();
485
486 /* Compute which hard registers are now in use
487 as homes for pseudo registers.
488 This is done here rather than (eg) in global_alloc
489 because this point is reached even if not optimizing. */
490
491 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
492 mark_home_live (i);
493
494 /* Make sure that the last insn in the chain
495 is not something that needs reloading. */
496 emit_note (NULL_PTR, NOTE_INSN_DELETED);
497
498 /* Find all the pseudo registers that didn't get hard regs
499 but do have known equivalent constants or memory slots.
500 These include parameters (known equivalent to parameter slots)
501 and cse'd or loop-moved constant memory addresses.
502
503 Record constant equivalents in reg_equiv_constant
504 so they will be substituted by find_reloads.
 505     Record memory equivalents in reg_equiv_memory_loc so they can
506 be substituted eventually by altering the REG-rtx's. */
507
508 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
509 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
510 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
511 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
512 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
513 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
514 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
515 bzero (reg_equiv_init, max_regno * sizeof (rtx));
516 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
517 bzero (reg_equiv_address, max_regno * sizeof (rtx));
518 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
519 bzero (reg_max_ref_width, max_regno * sizeof (int));
520 cannot_omit_stores = (char *) alloca (max_regno);
521 bzero (cannot_omit_stores, max_regno);
522
523 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
524 Also find all paradoxical subregs
525 and find largest such for each pseudo. */
526
527 for (insn = first; insn; insn = NEXT_INSN (insn))
528 {
529 rtx set = single_set (insn);
530
531 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
532 {
533 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
534 if (note
535#ifdef LEGITIMATE_PIC_OPERAND_P
536 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
537 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
538#endif
539 )
540 {
541 rtx x = XEXP (note, 0);
542 i = REGNO (SET_DEST (set));
543 if (i > LAST_VIRTUAL_REGISTER)
544 {
545 if (GET_CODE (x) == MEM)
546 reg_equiv_memory_loc[i] = x;
547 else if (CONSTANT_P (x))
548 {
549 if (LEGITIMATE_CONSTANT_P (x))
550 reg_equiv_constant[i] = x;
551 else
552 reg_equiv_memory_loc[i]
553 = force_const_mem (GET_MODE (SET_DEST (set)), x);
554 }
555 else
556 continue;
557
558 /* If this register is being made equivalent to a MEM
559 and the MEM is not SET_SRC, the equivalencing insn
560 is one with the MEM as a SET_DEST and it occurs later.
561 So don't mark this insn now. */
562 if (GET_CODE (x) != MEM
563 || rtx_equal_p (SET_SRC (set), x))
564 reg_equiv_init[i] = insn;
565 }
566 }
567 }
568
569 /* If this insn is setting a MEM from a register equivalent to it,
570 this is the equivalencing insn. */
571 else if (set && GET_CODE (SET_DEST (set)) == MEM
572 && GET_CODE (SET_SRC (set)) == REG
573 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
574 && rtx_equal_p (SET_DEST (set),
575 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
576 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
577
578 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
579 scan_paradoxical_subregs (PATTERN (insn));
580 }
581
582 /* Does this function require a frame pointer? */
583
584 frame_pointer_needed = (! flag_omit_frame_pointer
585#ifdef EXIT_IGNORE_STACK
586 /* ?? If EXIT_IGNORE_STACK is set, we will not save
587 and restore sp for alloca. So we can't eliminate
588 the frame pointer in that case. At some point,
589 we should improve this by emitting the
590 sp-adjusting insns for this case. */
591 || (current_function_calls_alloca
592 && EXIT_IGNORE_STACK)
593#endif
594 || FRAME_POINTER_REQUIRED);
595
596 num_eliminable = 0;
597
598 /* Initialize the table of registers to eliminate. The way we do this
599 depends on how the eliminable registers were defined. */
600#ifdef ELIMINABLE_REGS
601 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
602 {
603 ep->can_eliminate = ep->can_eliminate_previous
604 = (CAN_ELIMINATE (ep->from, ep->to)
605 && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
606 }
607#else
608 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
609 = ! frame_pointer_needed;
610#endif
611
612 /* Count the number of eliminable registers and build the FROM and TO
613 REG rtx's. Note that code in gen_rtx will cause, e.g.,
614 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
615 We depend on this. */
616 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
617 {
618 num_eliminable += ep->can_eliminate;
619 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
620 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
621 }
622
623 num_labels = max_label_num () - get_first_label_num ();
624
625 /* Allocate the tables used to store offset information at labels. */
626 offsets_known_at = (char *) alloca (num_labels);
627 offsets_at
628 = (int (*)[NUM_ELIMINABLE_REGS])
629 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
630
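  /* Bias both tables by the first label number used in this function so
     they can be indexed directly by label number.  */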
631 offsets_known_at -= get_first_label_num ();
632 offsets_at -= get_first_label_num ();
633
634 /* Alter each pseudo-reg rtx to contain its hard reg number.
635 Assign stack slots to the pseudos that lack hard regs or equivalents.
636 Do not touch virtual registers. */
637
638 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
639 alter_reg (i, -1);
640
641 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
642 because the stack size may be a part of the offset computation for
643 register elimination. */
644 assign_stack_local (BLKmode, 0, 0);
645
646 /* If we have some registers we think can be eliminated, scan all insns to
647 see if there is an insn that sets one of these registers to something
648 other than itself plus a constant. If so, the register cannot be
649 eliminated. Doing this scan here eliminates an extra pass through the
650 main reload loop in the most common case where register elimination
651 cannot be done. */
652 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
653 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
654 || GET_CODE (insn) == CALL_INSN)
655 note_stores (PATTERN (insn), mark_not_eliminable);
656
657#ifndef REGISTER_CONSTRAINTS
658 /* If all the pseudo regs have hard regs,
659 except for those that are never referenced,
660 we know that no reloads are needed. */
661 /* But that is not true if there are register constraints, since
662 in that case some pseudos might be in the wrong kind of hard reg. */
663
664 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
665 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
666 break;
667
668 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
 669     return 0;
670#endif
671
672 /* Compute the order of preference for hard registers to spill.
673 Store them by decreasing preference in potential_reload_regs. */
674
675 order_regs_for_reload ();
676
677 /* So far, no hard regs have been spilled. */
678 n_spills = 0;
679 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
680 spill_reg_order[i] = -1;
681
682 /* On most machines, we can't use any register explicitly used in the
683 rtl as a spill register. But on some, we have to. Those will have
684 taken care to keep the life of hard regs as short as possible. */
685
686#ifdef SMALL_REGISTER_CLASSES
687 CLEAR_HARD_REG_SET (forbidden_regs);
688#else
689 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
690#endif
691
692 /* Spill any hard regs that we know we can't eliminate. */
693 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
694 if (! ep->can_eliminate)
695 {
696 spill_hard_reg (ep->from, global, dumpfile, 1);
697 regs_ever_live[ep->from] = 1;
698 }
699
700 if (global)
701 for (i = 0; i < N_REG_CLASSES; i++)
702 {
703 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
704 bzero (basic_block_needs[i], n_basic_blocks);
705 }
706
707 /* From now on, we need to emit any moves without making new pseudos. */
708 reload_in_progress = 1;
709
710 /* This loop scans the entire function each go-round
711 and repeats until one repetition spills no additional hard regs. */
712
713 /* This flag is set when a pseudo reg is spilled,
714 to require another pass. Note that getting an additional reload
715 reg does not necessarily imply any pseudo reg was spilled;
716 sometimes we find a reload reg that no pseudo reg was allocated in. */
717 something_changed = 1;
718 /* This flag is set if there are any insns that require reloading. */
719 something_needs_reloads = 0;
720 /* This flag is set if there are any insns that require register
721 eliminations. */
722 something_needs_elimination = 0;
723 while (something_changed)
724 {
725 rtx after_call = 0;
726
727 /* For each class, number of reload regs needed in that class.
728 This is the maximum over all insns of the needs in that class
729 of the individual insn. */
730 int max_needs[N_REG_CLASSES];
731 /* For each class, size of group of consecutive regs
732 that is needed for the reloads of this class. */
733 int group_size[N_REG_CLASSES];
734 /* For each class, max number of consecutive groups needed.
735 (Each group contains group_size[CLASS] consecutive registers.) */
736 int max_groups[N_REG_CLASSES];
737 /* For each class, max number needed of regs that don't belong
738 to any of the groups. */
739 int max_nongroups[N_REG_CLASSES];
740 /* For each class, the machine mode which requires consecutive
741 groups of regs of that class.
742 If two different modes ever require groups of one class,
743 they must be the same size and equally restrictive for that class,
744 otherwise we can't handle the complexity. */
745 enum machine_mode group_mode[N_REG_CLASSES];
746 /* Record the insn where each maximum need is first found. */
747 rtx max_needs_insn[N_REG_CLASSES];
748 rtx max_groups_insn[N_REG_CLASSES];
749 rtx max_nongroups_insn[N_REG_CLASSES];
750 rtx x;
751 int starting_frame_size = get_frame_size ();
752 static char *reg_class_names[] = REG_CLASS_NAMES;
753
754 something_changed = 0;
755 bzero (max_needs, sizeof max_needs);
756 bzero (max_groups, sizeof max_groups);
757 bzero (max_nongroups, sizeof max_nongroups);
758 bzero (max_needs_insn, sizeof max_needs_insn);
759 bzero (max_groups_insn, sizeof max_groups_insn);
760 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
761 bzero (group_size, sizeof group_size);
762 for (i = 0; i < N_REG_CLASSES; i++)
763 group_mode[i] = VOIDmode;
764
 765       /* Keep track of which basic blocks need the reloads.  */
766 this_block = 0;
767
768 /* Remember whether any element of basic_block_needs
769 changes from 0 to 1 in this pass. */
770 new_basic_block_needs = 0;
771
772 /* Reset all offsets on eliminable registers to their initial values. */
773#ifdef ELIMINABLE_REGS
774 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
775 {
776 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
777 ep->previous_offset = ep->offset
778 = ep->max_offset = ep->initial_offset;
779 }
780#else
781#ifdef INITIAL_FRAME_POINTER_OFFSET
782 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
783#else
784 if (!FRAME_POINTER_REQUIRED)
785 abort ();
786 reg_eliminate[0].initial_offset = 0;
787#endif
788 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
789 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
790#endif
791
792 num_not_at_initial_offset = 0;
793
794 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
795
796 /* Set a known offset for each forced label to be at the initial offset
797 of each elimination. We do this because we assume that all
798 computed jumps occur from a location where each elimination is
799 at its initial offset. */
800
801 for (x = forced_labels; x; x = XEXP (x, 1))
802 if (XEXP (x, 0))
803 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
804
805 /* For each pseudo register that has an equivalent location defined,
806 try to eliminate any eliminable registers (such as the frame pointer)
807 assuming initial offsets for the replacement register, which
808 is the normal case.
809
810 If the resulting location is directly addressable, substitute
811 the MEM we just got directly for the old REG.
812
813 If it is not addressable but is a constant or the sum of a hard reg
814 and constant, it is probably not addressable because the constant is
 815	 out of range; in that case record the address, and we will generate
816 hairy code to compute the address in a register each time it is
817 needed.
818
819 If the location is not addressable, but does not have one of the
820 above forms, assign a stack slot. We have to do this to avoid the
821 potential of producing lots of reloads if, e.g., a location involves
822 a pseudo that didn't get a hard register and has an equivalent memory
823 location that also involves a pseudo that didn't get a hard register.
824
825 Perhaps at some point we will improve reload_when_needed handling
826 so this problem goes away. But that's very hairy. */
827
828 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
829 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
830 {
831 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
832
833 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
834 XEXP (x, 0)))
835 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
836 else if (CONSTANT_P (XEXP (x, 0))
837 || (GET_CODE (XEXP (x, 0)) == PLUS
838 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
839 && (REGNO (XEXP (XEXP (x, 0), 0))
840 < FIRST_PSEUDO_REGISTER)
841 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
842 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
843 else
844 {
845 /* Make a new stack slot. Then indicate that something
846 changed so we go back and recompute offsets for
847 eliminable registers because the allocation of memory
848 below might change some offset. reg_equiv_{mem,address}
849 will be set up for this pseudo on the next pass around
850 the loop. */
851 reg_equiv_memory_loc[i] = 0;
852 reg_equiv_init[i] = 0;
853 alter_reg (i, -1);
854 something_changed = 1;
855 }
856 }
857
858 /* If we allocated another pseudo to the stack, redo elimination
859 bookkeeping. */
860 if (something_changed)
861 continue;
862
863 /* If caller-saves needs a group, initialize the group to include
864 the size and mode required for caller-saves. */
865
866 if (caller_save_group_size > 1)
867 {
868 group_mode[(int) caller_save_spill_class] = Pmode;
869 group_size[(int) caller_save_spill_class] = caller_save_group_size;
870 }
871
872 /* Compute the most additional registers needed by any instruction.
873 Collect information separately for each class of regs. */
874
875 for (insn = first; insn; insn = NEXT_INSN (insn))
876 {
877 if (global && this_block + 1 < n_basic_blocks
878 && insn == basic_block_head[this_block+1])
879 ++this_block;
880
881 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
882 might include REG_LABEL), we need to see what effects this
883 has on the known offsets at labels. */
884
885 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
886 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
887 && REG_NOTES (insn) != 0))
888 set_label_offsets (insn, insn, 0);
889
890 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
891 {
892 /* Nonzero means don't use a reload reg that overlaps
893 the place where a function value can be returned. */
894 rtx avoid_return_reg = 0;
895
896 rtx old_body = PATTERN (insn);
897 int old_code = INSN_CODE (insn);
898 rtx old_notes = REG_NOTES (insn);
899 int did_elimination = 0;
900 int max_total_input_groups = 0, max_total_output_groups = 0;
901
902 /* To compute the number of reload registers of each class
 903		 needed for an insn, we must simulate what choose_reload_regs
904 can do. We do this by splitting an insn into an "input" and
905 an "output" part. RELOAD_OTHER reloads are used in both.
906 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
907 which must be live over the entire input section of reloads,
908 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
909 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
910 inputs.
911
912 The registers needed for output are RELOAD_OTHER and
913 RELOAD_FOR_OUTPUT, which are live for the entire output
914 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
915 reloads for each operand.
916
917 The total number of registers needed is the maximum of the
918 inputs and outputs. */
919
920 /* These just count RELOAD_OTHER. */
921 int insn_needs[N_REG_CLASSES];
922 int insn_groups[N_REG_CLASSES];
923 int insn_total_groups = 0;
924
925 /* Count RELOAD_FOR_INPUT reloads. */
926 int insn_needs_for_inputs[N_REG_CLASSES];
927 int insn_groups_for_inputs[N_REG_CLASSES];
928 int insn_total_groups_for_inputs = 0;
929
930 /* Count RELOAD_FOR_OUTPUT reloads. */
931 int insn_needs_for_outputs[N_REG_CLASSES];
932 int insn_groups_for_outputs[N_REG_CLASSES];
933 int insn_total_groups_for_outputs = 0;
934
935 /* Count RELOAD_FOR_INSN reloads. */
936 int insn_needs_for_insn[N_REG_CLASSES];
937 int insn_groups_for_insn[N_REG_CLASSES];
938 int insn_total_groups_for_insn = 0;
939
940 /* Count RELOAD_FOR_OTHER_ADDRESS reloads. */
941 int insn_needs_for_other_addr[N_REG_CLASSES];
942 int insn_groups_for_other_addr[N_REG_CLASSES];
943 int insn_total_groups_for_other_addr = 0;
944
945 /* Count RELOAD_FOR_INPUT_ADDRESS reloads. */
946 int insn_needs_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
947 int insn_groups_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
948 int insn_total_groups_for_in_addr[MAX_RECOG_OPERANDS];
949
950 /* Count RELOAD_FOR_OUTPUT_ADDRESS reloads. */
951 int insn_needs_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
952 int insn_groups_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
953 int insn_total_groups_for_out_addr[MAX_RECOG_OPERANDS];
954
955 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
956 int insn_needs_for_op_addr[N_REG_CLASSES];
957 int insn_groups_for_op_addr[N_REG_CLASSES];
958 int insn_total_groups_for_op_addr = 0;
959
960#if 0 /* This wouldn't work nowadays, since optimize_bit_field
961 looks for non-strict memory addresses. */
962 /* Optimization: a bit-field instruction whose field
963 happens to be a byte or halfword in memory
964 can be changed to a move instruction. */
965
966 if (GET_CODE (PATTERN (insn)) == SET)
967 {
968 rtx dest = SET_DEST (PATTERN (insn));
969 rtx src = SET_SRC (PATTERN (insn));
970
971 if (GET_CODE (dest) == ZERO_EXTRACT
972 || GET_CODE (dest) == SIGN_EXTRACT)
973 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
974 if (GET_CODE (src) == ZERO_EXTRACT
975 || GET_CODE (src) == SIGN_EXTRACT)
976 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
977 }
978#endif
979
980 /* If needed, eliminate any eliminable registers. */
981 if (num_eliminable)
982 did_elimination = eliminate_regs_in_insn (insn, 0);
983
984#ifdef SMALL_REGISTER_CLASSES
985 /* Set avoid_return_reg if this is an insn
986 that might use the value of a function call. */
987 if (GET_CODE (insn) == CALL_INSN)
988 {
989 if (GET_CODE (PATTERN (insn)) == SET)
990 after_call = SET_DEST (PATTERN (insn));
991 else if (GET_CODE (PATTERN (insn)) == PARALLEL
992 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
993 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
994 else
995 after_call = 0;
996 }
997 else if (after_call != 0
998 && !(GET_CODE (PATTERN (insn)) == SET
999 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1000 {
1001 if (reg_mentioned_p (after_call, PATTERN (insn)))
1002 avoid_return_reg = after_call;
1003 after_call = 0;
1004 }
1005#endif /* SMALL_REGISTER_CLASSES */
1006
1007 /* Analyze the instruction. */
1008 find_reloads (insn, 0, spill_indirect_levels, global,
1009 spill_reg_order);
1010
1011 /* Remember for later shortcuts which insns had any reloads or
1012 register eliminations.
1013
1014 One might think that it would be worthwhile to mark insns
1015 that need register replacements but not reloads, but this is
1016 not safe because find_reloads may do some manipulation of
1017 the insn (such as swapping commutative operands), which would
1018 be lost when we restore the old pattern after register
1019 replacement. So the actions of find_reloads must be redone in
1020 subsequent passes or in reload_as_needed.
1021
1022 However, it is safe to mark insns that need reloads
1023 but not register replacement. */
1024
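	      /* Encode the result in the insn's mode: QImode marks an insn
		 that needed register elimination, HImode one that needs
		 reloads, an existing DImode marking is preserved, and
		 VOIDmode means neither applies.  */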
1025 PUT_MODE (insn, (did_elimination ? QImode
1026 : n_reloads ? HImode
1027 : GET_MODE (insn) == DImode ? DImode
1028 : VOIDmode));
1029
1030 /* Discard any register replacements done. */
1031 if (did_elimination)
1032 {
1033 obstack_free (&reload_obstack, reload_firstobj);
1034 PATTERN (insn) = old_body;
1035 INSN_CODE (insn) = old_code;
1036 REG_NOTES (insn) = old_notes;
1037 something_needs_elimination = 1;
1038 }
1039
1040 /* If this insn has no reloads, we need not do anything except
1041 in the case of a CALL_INSN when we have caller-saves and
1042 caller-save needs reloads. */
1043
1044 if (n_reloads == 0
1045 && ! (GET_CODE (insn) == CALL_INSN
1046 && caller_save_spill_class != NO_REGS))
1047 continue;
1048
1049 something_needs_reloads = 1;
1050
1051 for (i = 0; i < N_REG_CLASSES; i++)
1052 {
1053 insn_needs[i] = 0, insn_groups[i] = 0;
1054 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1055 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1056 insn_needs_for_insn[i] = 0, insn_groups_for_insn[i] = 0;
1057 insn_needs_for_op_addr[i] = 0, insn_groups_for_op_addr[i] = 0;
1058 insn_needs_for_other_addr[i] = 0;
1059 insn_groups_for_other_addr[i] = 0;
1060 }
1061
1062 for (i = 0; i < reload_n_operands; i++)
1063 {
1064 insn_total_groups_for_in_addr[i] = 0;
1065 insn_total_groups_for_out_addr[i] = 0;
1066
1067 for (j = 0; j < N_REG_CLASSES; j++)
1068 {
1069 insn_needs_for_in_addr[i][j] = 0;
1070 insn_needs_for_out_addr[i][j] = 0;
1071 insn_groups_for_in_addr[i][j] = 0;
1072 insn_groups_for_out_addr[i][j] = 0;
1073 }
1074 }
1075
1076 /* Count each reload once in every class
1077 containing the reload's own class. */
1078
1079 for (i = 0; i < n_reloads; i++)
1080 {
1081 register enum reg_class *p;
1082 enum reg_class class = reload_reg_class[i];
1083 int size;
1084 enum machine_mode mode;
1085 int *this_groups;
1086 int *this_needs;
1087 int *this_total_groups;
1088
1089 /* Don't count the dummy reloads, for which one of the
1090 regs mentioned in the insn can be used for reloading.
1091 Don't count optional reloads.
1092 Don't count reloads that got combined with others. */
1093 if (reload_reg_rtx[i] != 0
1094 || reload_optional[i] != 0
1095 || (reload_out[i] == 0 && reload_in[i] == 0
1096 && ! reload_secondary_p[i]))
1097 continue;
1098
1099 /* Show that a reload register of this class is needed
1100 in this basic block. We do not use insn_needs and
1101 insn_groups because they are overly conservative for
1102 this purpose. */
1103 if (global && ! basic_block_needs[(int) class][this_block])
1104 {
1105 basic_block_needs[(int) class][this_block] = 1;
1106 new_basic_block_needs = 1;
1107 }
1108
1109 /* Decide which time-of-use to count this reload for. */
1110 switch (reload_when_needed[i])
1111 {
1112 case RELOAD_OTHER:
1113 this_needs = insn_needs;
1114 this_groups = insn_groups;
1115 this_total_groups = &insn_total_groups;
1116 break;
1117
1118 case RELOAD_FOR_INPUT:
1119 this_needs = insn_needs_for_inputs;
1120 this_groups = insn_groups_for_inputs;
1121 this_total_groups = &insn_total_groups_for_inputs;
1122 break;
1123
1124 case RELOAD_FOR_OUTPUT:
1125 this_needs = insn_needs_for_outputs;
1126 this_groups = insn_groups_for_outputs;
1127 this_total_groups = &insn_total_groups_for_outputs;
1128 break;
1129
1130 case RELOAD_FOR_INSN:
1131 this_needs = insn_needs_for_insn;
 1132		    this_groups = insn_groups_for_insn;
1133 this_total_groups = &insn_total_groups_for_insn;
1134 break;
1135
1136 case RELOAD_FOR_OTHER_ADDRESS:
1137 this_needs = insn_needs_for_other_addr;
1138 this_groups = insn_groups_for_other_addr;
1139 this_total_groups = &insn_total_groups_for_other_addr;
1140 break;
1141
1142 case RELOAD_FOR_INPUT_ADDRESS:
1143 this_needs = insn_needs_for_in_addr[reload_opnum[i]];
1144 this_groups = insn_groups_for_in_addr[reload_opnum[i]];
1145 this_total_groups
1146 = &insn_total_groups_for_in_addr[reload_opnum[i]];
1147 break;
1148
1149 case RELOAD_FOR_OUTPUT_ADDRESS:
1150 this_needs = insn_needs_for_out_addr[reload_opnum[i]];
1151 this_groups = insn_groups_for_out_addr[reload_opnum[i]];
1152 this_total_groups
1153 = &insn_total_groups_for_out_addr[reload_opnum[i]];
1154 break;
1155
1156 case RELOAD_FOR_OPERAND_ADDRESS:
1157 this_needs = insn_needs_for_op_addr;
1158 this_groups = insn_groups_for_op_addr;
1159 this_total_groups = &insn_total_groups_for_op_addr;
1160 break;
1161 }
1162
1163 mode = reload_inmode[i];
1164 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1165 mode = reload_outmode[i];
1166 size = CLASS_MAX_NREGS (class, mode);
1167 if (size > 1)
1168 {
1169 enum machine_mode other_mode, allocate_mode;
1170
1171 /* Count number of groups needed separately from
1172 number of individual regs needed. */
1173 this_groups[(int) class]++;
1174 p = reg_class_superclasses[(int) class];
1175 while (*p != LIM_REG_CLASSES)
1176 this_groups[(int) *p++]++;
1177 (*this_total_groups)++;
1178
1179 /* Record size and mode of a group of this class. */
1180 /* If more than one size group is needed,
1181 make all groups the largest needed size. */
1182 if (group_size[(int) class] < size)
1183 {
1184 other_mode = group_mode[(int) class];
1185 allocate_mode = mode;
1186
1187 group_size[(int) class] = size;
1188 group_mode[(int) class] = mode;
1189 }
1190 else
1191 {
1192 other_mode = mode;
1193 allocate_mode = group_mode[(int) class];
1194 }
1195
1196 /* Crash if two dissimilar machine modes both need
1197 groups of consecutive regs of the same class. */
1198
1199 if (other_mode != VOIDmode
1200 && other_mode != allocate_mode
1201 && ! modes_equiv_for_class_p (allocate_mode,
1202 other_mode,
1203 class))
1204 abort ();
1205 }
1206 else if (size == 1)
1207 {
1208 this_needs[(int) class] += 1;
1209 p = reg_class_superclasses[(int) class];
1210 while (*p != LIM_REG_CLASSES)
1211 this_needs[(int) *p++] += 1;
1212 }
1213 else
1214 abort ();
1215 }
1216
1217 /* All reloads have been counted for this insn;
1218 now merge the various times of use.
1219 This sets insn_needs, etc., to the maximum total number
1220 of registers needed at any point in this insn. */
1221
1222 for (i = 0; i < N_REG_CLASSES; i++)
1223 {
1224 int in_max, out_max;
1225
1226 for (in_max = 0, out_max = 0, j = 0;
1227 j < reload_n_operands; j++)
1228 {
1229 in_max = MAX (in_max, insn_needs_for_in_addr[j][i]);
1230 out_max = MAX (out_max, insn_needs_for_out_addr[j][i]);
1231 }
1232
1233 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1234 and operand addresses but not things used to reload them.
1235 Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads don't
1236 conflict with things needed to reload inputs or
1237 outputs. */
1238
1239 in_max = MAX (in_max, insn_needs_for_op_addr[i]);
1240 out_max = MAX (out_max, insn_needs_for_insn[i]);
1241
1242 insn_needs_for_inputs[i]
1243 = MAX (insn_needs_for_inputs[i]
1244 + insn_needs_for_op_addr[i]
1245 + insn_needs_for_insn[i],
1246 in_max + insn_needs_for_inputs[i]);
1247
1248 insn_needs_for_outputs[i] += out_max;
1249 insn_needs[i] += MAX (MAX (insn_needs_for_inputs[i],
1250 insn_needs_for_outputs[i]),
1251 insn_needs_for_other_addr[i]);
1252
1253 for (in_max = 0, out_max = 0, j = 0;
1254 j < reload_n_operands; j++)
1255 {
1256 in_max = MAX (in_max, insn_groups_for_in_addr[j][i]);
1257 out_max = MAX (out_max, insn_groups_for_out_addr[j][i]);
1258 }
1259
1260 in_max = MAX (in_max, insn_groups_for_op_addr[i]);
1261 out_max = MAX (out_max, insn_groups_for_insn[i]);
1262
1263 insn_groups_for_inputs[i]
1264 = MAX (insn_groups_for_inputs[i]
1265 + insn_groups_for_op_addr[i]
1266 + insn_groups_for_insn[i],
1267 in_max + insn_groups_for_inputs[i]);
1268
1269 insn_groups_for_outputs[i] += out_max;
1270 insn_groups[i] += MAX (MAX (insn_groups_for_inputs[i],
1271 insn_groups_for_outputs[i]),
1272 insn_groups_for_other_addr[i]);
1273 }
1274
1275 for (i = 0; i < reload_n_operands; i++)
1276 {
1277 max_total_input_groups
1278 = MAX (max_total_input_groups,
1279 insn_total_groups_for_in_addr[i]);
1280 max_total_output_groups
1281 = MAX (max_total_output_groups,
1282 insn_total_groups_for_out_addr[i]);
1283 }
1284
1285 max_total_input_groups = MAX (max_total_input_groups,
1286 insn_total_groups_for_op_addr);
1287 max_total_output_groups = MAX (max_total_output_groups,
1288 insn_total_groups_for_insn);
1289
1290 insn_total_groups_for_inputs
1291 = MAX (max_total_input_groups + insn_total_groups_for_op_addr
1292 + insn_total_groups_for_insn,
1293 max_total_input_groups + insn_total_groups_for_inputs);
1294
1295 insn_total_groups_for_outputs += max_total_output_groups;
1296
1297 insn_total_groups += MAX (MAX (insn_total_groups_for_outputs,
1298 insn_total_groups_for_inputs),
1299 insn_total_groups_for_other_addr);
1300
1301 /* If this is a CALL_INSN and caller-saves will need
1302 a spill register, act as if the spill register is
1303 needed for this insn. However, the spill register
1304 can be used by any reload of this insn, so we only
1305 need do something if no need for that class has
1306 been recorded.
1307
1308 The assumption that every CALL_INSN will trigger a
 1309		 caller-save is highly conservative; however, the number
1310 of cases where caller-saves will need a spill register but
1311 a block containing a CALL_INSN won't need a spill register
1312 of that class should be quite rare.
1313
1314 If a group is needed, the size and mode of the group will
1315 have been set up at the beginning of this loop. */
1316
1317 if (GET_CODE (insn) == CALL_INSN
1318 && caller_save_spill_class != NO_REGS)
1319 {
1320 int *caller_save_needs
1321 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1322
1323 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1324 {
1325 register enum reg_class *p
1326 = reg_class_superclasses[(int) caller_save_spill_class];
1327
1328 caller_save_needs[(int) caller_save_spill_class]++;
1329
1330 while (*p != LIM_REG_CLASSES)
1331 caller_save_needs[(int) *p++] += 1;
1332 }
1333
1334 if (caller_save_group_size > 1)
1335 insn_total_groups = MAX (insn_total_groups, 1);
1336
1337
1338 /* Show that this basic block will need a register of
1339 this class. */
1340
1341 if (global
1342 && ! (basic_block_needs[(int) caller_save_spill_class]
1343 [this_block]))
1344 {
1345 basic_block_needs[(int) caller_save_spill_class]
1346 [this_block] = 1;
1347 new_basic_block_needs = 1;
1348 }
1349 }
1350
1351#ifdef SMALL_REGISTER_CLASSES
1352 /* If this insn stores the value of a function call,
1353 and that value is in a register that has been spilled,
1354 and if the insn needs a reload in a class
1355 that might use that register as the reload register,
 1356		 then add an extra need in that class.
1357 This makes sure we have a register available that does
1358 not overlap the return value. */
1359 if (avoid_return_reg)
1360 {
1361 int regno = REGNO (avoid_return_reg);
1362 int nregs
1363 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1364 int r;
1365 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1366
1367 /* First compute the "basic needs", which counts a
1368 need only in the smallest class in which it
1369 is required. */
1370
1371 bcopy (insn_needs, basic_needs, sizeof basic_needs);
1372 bcopy (insn_groups, basic_groups, sizeof basic_groups);
1373
1374 for (i = 0; i < N_REG_CLASSES; i++)
1375 {
1376 enum reg_class *p;
1377
1378 if (basic_needs[i] >= 0)
1379 for (p = reg_class_superclasses[i];
1380 *p != LIM_REG_CLASSES; p++)
1381 basic_needs[(int) *p] -= basic_needs[i];
1382
1383 if (basic_groups[i] >= 0)
1384 for (p = reg_class_superclasses[i];
1385 *p != LIM_REG_CLASSES; p++)
1386 basic_groups[(int) *p] -= basic_groups[i];
1387 }
1388
1389 /* Now count extra regs if there might be a conflict with
1390 the return value register.
1391
1392 ??? This is not quite correct because we don't properly
1393 handle the case of groups, but if we end up doing
1394 something wrong, it either will end up not mattering or
1395 we will abort elsewhere. */
1396
1397 for (r = regno; r < regno + nregs; r++)
1398 if (spill_reg_order[r] >= 0)
1399 for (i = 0; i < N_REG_CLASSES; i++)
1400 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1401 {
1402 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1403 {
1404 enum reg_class *p;
1405
1406 insn_needs[i]++;
1407 p = reg_class_superclasses[i];
1408 while (*p != LIM_REG_CLASSES)
1409 insn_needs[(int) *p++]++;
1410 }
1411 }
1412 }
1413#endif /* SMALL_REGISTER_CLASSES */
1414
1415 /* For each class, collect maximum need of any insn. */
1416
1417 for (i = 0; i < N_REG_CLASSES; i++)
1418 {
1419 if (max_needs[i] < insn_needs[i])
1420 {
1421 max_needs[i] = insn_needs[i];
1422 max_needs_insn[i] = insn;
1423 }
1424 if (max_groups[i] < insn_groups[i])
1425 {
1426 max_groups[i] = insn_groups[i];
1427 max_groups_insn[i] = insn;
1428 }
1429 if (insn_total_groups > 0)
1430 if (max_nongroups[i] < insn_needs[i])
1431 {
1432 max_nongroups[i] = insn_needs[i];
1433 max_nongroups_insn[i] = insn;
1434 }
1435 }
1436 }
1437 /* Note that there is a continue statement above. */
1438 }
1439
1440 /* If we allocated any new memory locations, make another pass
1441 since it might have changed elimination offsets. */
1442 if (starting_frame_size != get_frame_size ())
1443 something_changed = 1;
1444
1445 if (dumpfile)
1446 for (i = 0; i < N_REG_CLASSES; i++)
1447 {
1448 if (max_needs[i] > 0)
1449 fprintf (dumpfile,
1450 ";; Need %d reg%s of class %s (for insn %d).\n",
1451 max_needs[i], max_needs[i] == 1 ? "" : "s",
1452 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1453 if (max_nongroups[i] > 0)
1454 fprintf (dumpfile,
1455 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1456 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1457 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1458 if (max_groups[i] > 0)
1459 fprintf (dumpfile,
1460 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1461 max_groups[i], max_groups[i] == 1 ? "" : "s",
1462 mode_name[(int) group_mode[i]],
1463 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1464 }
1465
1466 /* If we have caller-saves, set up the save areas and see if caller-save
1467 will need a spill register. */
1468
1469 if (caller_save_needed
1470 && ! setup_save_areas (&something_changed)
1471 && caller_save_spill_class == NO_REGS)
1472 {
1473 /* The class we will need depends on whether the machine
1474 supports the sum of two registers for an address; see
1475 find_address_reloads for details. */
1476
1477 caller_save_spill_class
1478 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1479 caller_save_group_size
1480 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1481 something_changed = 1;
1482 }
1483
1484 /* See if anything that happened changes which eliminations are valid.
1485 For example, on the Sparc, whether or not the frame pointer can
1486 be eliminated can depend on what registers have been used. We need
1487 not check some conditions again (such as flag_omit_frame_pointer)
1488 since they can't have changed. */
1489
1490 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1491 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1492#ifdef ELIMINABLE_REGS
1493 || ! CAN_ELIMINATE (ep->from, ep->to)
1494#endif
1495 )
1496 ep->can_eliminate = 0;
1497
1498 /* Look for the case where we have discovered that we can't replace
1499 register A with register B and that means that we will now be
1500 trying to replace register A with register C. This means we can
1501 no longer replace register C with register B and we need to disable
1502 such an elimination, if it exists. This occurs often with A == ap,
1503 B == sp, and C == fp. */
1504
1505 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1506 {
1507 struct elim_table *op;
1508 register int new_to = -1;
1509
1510 if (! ep->can_eliminate && ep->can_eliminate_previous)
1511 {
1512 /* Find the current elimination for ep->from, if there is a
1513 new one. */
1514 for (op = reg_eliminate;
1515 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1516 if (op->from == ep->from && op->can_eliminate)
1517 {
1518 new_to = op->to;
1519 break;
1520 }
1521
1522 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1523 disable it. */
1524 for (op = reg_eliminate;
1525 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1526 if (op->from == new_to && op->to == ep->to)
1527 op->can_eliminate = 0;
1528 }
1529 }
1530
1531 /* See if any registers that we thought we could eliminate the previous
1532 time are no longer eliminable. If so, something has changed and we
1533 must spill the register. Also, recompute the number of eliminable
1534 registers and see if the frame pointer is needed; it is if there is
1535 no elimination of the frame pointer that we can perform. */
1536
1537 frame_pointer_needed = 1;
1538 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1539 {
1540 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1541 frame_pointer_needed = 0;
1542
1543 if (! ep->can_eliminate && ep->can_eliminate_previous)
1544 {
1545 ep->can_eliminate_previous = 0;
1546 spill_hard_reg (ep->from, global, dumpfile, 1);
1547 regs_ever_live[ep->from] = 1;
1548 something_changed = 1;
1549 num_eliminable--;
1550 }
1551 }
1552
1553 /* If all needs are met, we win. */
1554
1555 for (i = 0; i < N_REG_CLASSES; i++)
1556 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1557 break;
1558 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1559 break;
1560
1561 /* Not all needs are met; must spill some hard regs. */
1562
1563 /* Put all registers spilled so far back in potential_reload_regs, but
1564 put them at the front, since we've already spilled most of the
1565 pseudos in them (we might have left some pseudos unspilled if they
1566 were in a block that didn't need any spill registers of a conflicting
1567 class). We used to try to mark off the need for those registers,
1568 but doing so properly is very complex and reallocating them is the
1569 simpler approach. First, "pack" potential_reload_regs by pushing
1570 any nonnegative entries towards the end. That will leave room
1571 for the registers we already spilled.
1572
1573 Also, undo the marking of the spill registers from the last time
1574 around in FORBIDDEN_REGS since we will probably be allocating
1575 them again below.
1576
1577 ??? It is theoretically possible that we might end up not using one
1578 of our previously-spilled registers in this allocation, even though
1579 they are at the head of the list. It's not clear what to do about
1580 this, but it was no better before, when we marked off the needs met
1581 by the previously-spilled registers. With the current code, globals
1582 can be allocated into these registers, but locals cannot. */
1583
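 /* As an illustrative sketch of the "packing" step below (register numbers
    hypothetical): with six hard regs, potential_reload_regs = {4, -1, 2, -1, 0, 3},
    n_spills == 2 and spill_regs = {1, 5}, the downward copy leaves
    {4, -1, 4, 2, 0, 3}, and the loop over spill_regs then overwrites the
    first two slots, giving {1, 5, 4, 2, 0, 3}: the previously-spilled regs
    sit at the front and the remaining candidates keep their relative order. */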
1584 if (n_spills)
1585 {
1586 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1587 if (potential_reload_regs[i] != -1)
1588 potential_reload_regs[j--] = potential_reload_regs[i];
1589
1590 for (i = 0; i < n_spills; i++)
1591 {
1592 potential_reload_regs[i] = spill_regs[i];
1593 spill_reg_order[spill_regs[i]] = -1;
1594 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1595 }
1596
1597 n_spills = 0;
1598 }
1599
1600 /* Now find more reload regs to satisfy the remaining need.
1601 Do it by ascending class number, since otherwise a reg
1602 might be spilled for a big class and might fail to count
1603 for a smaller class even though it belongs to that class.
1604
1605 Count spilled regs in `spills', and add entries to
1606 `spill_regs' and `spill_reg_order'.
1607
1608 ??? Note there is a problem here.
1609 When there is a need for a group in a high-numbered class,
1610 and also need for non-group regs that come from a lower class,
1611 the non-group regs are chosen first. If there aren't many regs,
1612 they might leave no room for a group.
1613
1614 This was happening on the 386. To fix it, we added the code
1615 that calls possible_group_p, so that the lower class won't
1616 break up the last possible group.
1617
1618 Really fixing the problem would require changes above
1619 in counting the regs already spilled, and in choose_reload_regs.
1620 It might be hard to avoid introducing bugs there. */
1621
1622 CLEAR_HARD_REG_SET (counted_for_groups);
1623 CLEAR_HARD_REG_SET (counted_for_nongroups);
1624
1625 for (class = 0; class < N_REG_CLASSES; class++)
1626 {
1627 /* First get the groups of registers.
1628 If we got single registers first, we might fragment
1629 possible groups. */
1630 while (max_groups[class] > 0)
1631 {
1632 /* If any single spilled regs happen to form groups,
1633 count them now. Maybe we don't really need
1634 to spill another group. */
1635 count_possible_groups (group_size, group_mode, max_groups);
1636
1637 if (max_groups[class] <= 0)
1638 break;
1639
1640 /* Groups of size 2 (the only groups used on most machines)
1641 are treated specially. */
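 /* For instance (register numbers hypothetical): if reg 5 is already a
    spill reg, reg 4 is still free, both belong to CLASS, and reg 4 can
    start a group in group_mode[class], then spilling reg 4 completes the
    pair (4,5), so only one new register has to be spilled for this group. */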
1642 if (group_size[class] == 2)
1643 {
1644 /* First, look for a register that will complete a group. */
1645 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1646 {
1647 int other;
1648
1649 j = potential_reload_regs[i];
1650 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1651 &&
1652 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1653 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1654 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1655 && HARD_REGNO_MODE_OK (other, group_mode[class])
1656 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1657 other)
1658 /* We don't want one part of another group.
1659 We could get "two groups" that overlap! */
1660 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1661 ||
1662 (j < FIRST_PSEUDO_REGISTER - 1
1663 && (other = j + 1, spill_reg_order[other] >= 0)
1664 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1665 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1666 && HARD_REGNO_MODE_OK (j, group_mode[class])
1667 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1668 other)
1669 && ! TEST_HARD_REG_BIT (counted_for_groups,
1670 other))))
1671 {
1672 register enum reg_class *p;
1673
1674 /* We have found one that will complete a group,
1675 so count off one group as provided. */
1676 max_groups[class]--;
1677 p = reg_class_superclasses[class];
1678 while (*p != LIM_REG_CLASSES)
1679 max_groups[(int) *p++]--;
1680
1681 /* Indicate both these regs are part of a group. */
1682 SET_HARD_REG_BIT (counted_for_groups, j);
1683 SET_HARD_REG_BIT (counted_for_groups, other);
1684 break;
1685 }
1686 }
1687 /* We can't complete a group, so start one. */
1688 if (i == FIRST_PSEUDO_REGISTER)
1689 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1690 {
1691 int k;
1692 j = potential_reload_regs[i];
1693 /* Verify that J+1 is a potential reload reg. */
1694 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1695 if (potential_reload_regs[k] == j + 1)
1696 break;
1697 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1698 && k < FIRST_PSEUDO_REGISTER
1699 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1700 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1701 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1702 && HARD_REGNO_MODE_OK (j, group_mode[class])
1703 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1704 j + 1)
1705 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1706 break;
1707 }
1708
1709 /* I should be the index in potential_reload_regs
1710 of the new reload reg we have found. */
1711
1712 if (i >= FIRST_PSEUDO_REGISTER)
1713 {
1714 /* There are no groups left to spill. */
1715 spill_failure (max_groups_insn[class]);
1716 failure = 1;
1717 goto failed;
1718 }
1719 else
1720 something_changed
1721 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1722 global, dumpfile);
1723 }
1724 else
1725 {
1726 /* For groups of more than 2 registers,
1727 look for a sufficient sequence of unspilled registers,
1728 and spill them all at once. */
1729 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1730 {
1731 int k;
1732
1733 j = potential_reload_regs[i];
1734 if (j >= 0
1735 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1736 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1737 {
1738 /* Check each reg in the sequence. */
1739 for (k = 0; k < group_size[class]; k++)
1740 if (! (spill_reg_order[j + k] < 0
1741 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1742 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1743 break;
1744 /* We got a full sequence, so spill them all. */
1745 if (k == group_size[class])
1746 {
1747 register enum reg_class *p;
1748 for (k = 0; k < group_size[class]; k++)
1749 {
1750 int idx;
1751 SET_HARD_REG_BIT (counted_for_groups, j + k);
1752 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1753 if (potential_reload_regs[idx] == j + k)
1754 break;
1755 something_changed
1756 |= new_spill_reg (idx, class,
1757 max_needs, NULL_PTR,
1758 global, dumpfile);
1759 }
1760
1761 /* We have found one that will complete a group,
1762 so count off one group as provided. */
1763 max_groups[class]--;
1764 p = reg_class_superclasses[class];
1765 while (*p != LIM_REG_CLASSES)
1766 max_groups[(int) *p++]--;
1767
1768 break;
1769 }
1770 }
1771 }
1772 /* We couldn't find any registers for this reload.
1773 Avoid going into an infinite loop. */
1774 if (i >= FIRST_PSEUDO_REGISTER)
1775 {
1776 /* There are no groups left. */
1777 spill_failure (max_groups_insn[class]);
1778 failure = 1;
1779 goto failed;
1780 }
1781 }
1782 }
1783
1784 /* Now similarly satisfy all need for single registers. */
1785
1786 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1787 {
1788#ifdef SMALL_REGISTER_CLASSES
1789 /* This should be right for all machines, but only the 386
1790 is known to need it, so this conditional plays safe.
1791 ??? For 2.5, try making this unconditional. */
1792 /* If we spilled enough regs, but they weren't counted
1793 against the non-group need, see if we can count them now.
1794 If so, we can avoid some actual spilling. */
1795 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1796 for (i = 0; i < n_spills; i++)
1797 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1798 spill_regs[i])
1799 && !TEST_HARD_REG_BIT (counted_for_groups,
1800 spill_regs[i])
1801 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1802 spill_regs[i])
1803 && max_nongroups[class] > 0)
1804 {
1805 register enum reg_class *p;
1806
1807 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1808 max_nongroups[class]--;
1809 p = reg_class_superclasses[class];
1810 while (*p != LIM_REG_CLASSES)
1811 max_nongroups[(int) *p++]--;
1812 }
1813 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1814 break;
1815#endif
1816
1817 /* Consider the potential reload regs that aren't
1818 yet in use as reload regs, in order of preference.
1819 Find the most preferred one that's in this class. */
1820
1821 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1822 if (potential_reload_regs[i] >= 0
1823 && TEST_HARD_REG_BIT (reg_class_contents[class],
1824 potential_reload_regs[i])
1825 /* If this reg will not be available for groups,
1826 pick one that does not foreclose possible groups.
1827 This is a kludge, and not very general,
1828 but it should be sufficient to make the 386 work,
1829 and the problem should not occur on machines with
1830 more registers. */
1831 && (max_nongroups[class] == 0
1832 || possible_group_p (potential_reload_regs[i], max_groups)))
1833 break;
1834
1835 /* If we couldn't get a register, try to get one even if we
1836 might foreclose possible groups. This may cause problems
1837 later, but that's better than aborting now, since it is
1838 possible that we will, in fact, be able to form the needed
1839 group even with this allocation. */
1840
1841 if (i >= FIRST_PSEUDO_REGISTER
1842 && (asm_noperands (max_needs[class] > 0
1843 ? max_needs_insn[class]
1844 : max_nongroups_insn[class])
1845 < 0))
1846 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1847 if (potential_reload_regs[i] >= 0
1848 && TEST_HARD_REG_BIT (reg_class_contents[class],
1849 potential_reload_regs[i]))
1850 break;
1851
1852 /* I should be the index in potential_reload_regs
1853 of the new reload reg we have found. */
1854
1855 if (i >= FIRST_PSEUDO_REGISTER)
1856 {
1857 /* There are no possible registers left to spill. */
1858 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1859 : max_nongroups_insn[class]);
1860 failure = 1;
1861 goto failed;
1862 }
1863 else
1864 something_changed
1865 |= new_spill_reg (i, class, max_needs, max_nongroups,
1866 global, dumpfile);
1867 }
1868 }
1869 }
1870
1871 /* If global-alloc was run, notify it of any register eliminations we have
1872 done. */
1873 if (global)
1874 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1875 if (ep->can_eliminate)
1876 mark_elimination (ep->from, ep->to);
1877
1878 /* Insert code to save and restore call-clobbered hard regs
1879 around calls. Tell it what mode to use so that we will process
1880 those insns in reload_as_needed if we have to. */
1881
1882 if (caller_save_needed)
1883 save_call_clobbered_regs (num_eliminable ? QImode
1884 : caller_save_spill_class != NO_REGS ? HImode
1885 : VOIDmode);
1886
1887 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1888 If that insn didn't set the register (i.e., it copied the register to
1889 memory), just delete that insn instead of the equivalencing insn plus
1890 anything now dead. If we call delete_dead_insn on that insn, we may
1891 delete the insn that actually sets the register if the register dies
1892 there and that is incorrect. */
1893
1894 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1895 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1896 && GET_CODE (reg_equiv_init[i]) != NOTE)
1897 {
1898 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1899 delete_dead_insn (reg_equiv_init[i]);
1900 else
1901 {
1902 PUT_CODE (reg_equiv_init[i], NOTE);
1903 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1904 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1905 }
1906 }
1907
1908 /* Use the reload registers where necessary
1909 by generating move instructions to move the must-be-register
1910 values into or out of the reload registers. */
1911
1912 if (something_needs_reloads || something_needs_elimination
1913 || (caller_save_needed && num_eliminable)
1914 || caller_save_spill_class != NO_REGS)
1915 reload_as_needed (first, global);
1916
1917 /* If we were able to eliminate the frame pointer, show that it is no
1918 longer live at the start of any basic block. If it is live by
1919 virtue of being in a pseudo, that pseudo will be marked live
1920 and hence the frame pointer will be known to be live via that
1921 pseudo. */
1922
1923 if (! frame_pointer_needed)
1924 for (i = 0; i < n_basic_blocks; i++)
1925 basic_block_live_at_start[i][FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1926 &= ~ ((REGSET_ELT_TYPE) 1 << (FRAME_POINTER_REGNUM % REGSET_ELT_BITS));
1927
1928 /* Come here (with failure set nonzero) if we can't get enough spill regs
1929 and we decide not to abort about it. */
1930 failed:
1931
1932 reload_in_progress = 0;
1933
1934 /* Now eliminate all pseudo regs by modifying them into
1935 their equivalent memory references.
1936 The REG-rtx's for the pseudos are modified in place,
1937 so all insns that used to refer to them now refer to memory.
1938
1939 For a reg that has a reg_equiv_address, all those insns
1940 were changed by reloading so that no insns refer to it any longer;
1941 but the DECL_RTL of a variable decl may refer to it,
1942 and if so this causes the debugging info to mention the variable. */
1943
1944 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1945 {
1946 rtx addr = 0;
1947 int in_struct = 0;
1948 if (reg_equiv_mem[i])
1949 {
1950 addr = XEXP (reg_equiv_mem[i], 0);
1951 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1952 }
1953 if (reg_equiv_address[i])
1954 addr = reg_equiv_address[i];
1955 if (addr)
1956 {
1957 if (reg_renumber[i] < 0)
1958 {
1959 rtx reg = regno_reg_rtx[i];
1960 XEXP (reg, 0) = addr;
1961 REG_USERVAR_P (reg) = 0;
1962 MEM_IN_STRUCT_P (reg) = in_struct;
1963 PUT_CODE (reg, MEM);
1964 }
1965 else if (reg_equiv_mem[i])
1966 XEXP (reg_equiv_mem[i], 0) = addr;
1967 }
1968 }
1969
1970#ifdef PRESERVE_DEATH_INFO_REGNO_P
1971 /* Make a pass over all the insns and remove death notes for things that
1972 are no longer registers or no longer die in the insn (e.g., an input
1973 and output pseudo being tied). */
1974
1975 for (insn = first; insn; insn = NEXT_INSN (insn))
1976 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1977 {
1978 rtx note, next;
1979
1980 for (note = REG_NOTES (insn); note; note = next)
1981 {
1982 next = XEXP (note, 1);
1983 if (REG_NOTE_KIND (note) == REG_DEAD
1984 && (GET_CODE (XEXP (note, 0)) != REG
1985 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1986 remove_note (insn, note);
1987 }
1988 }
1989#endif
1990
1991 /* Indicate that we no longer have known memory locations or constants. */
1992 reg_equiv_constant = 0;
1993 reg_equiv_memory_loc = 0;
1994
1995 return failure;
1996}
1997\f
1998/* Nonzero if, after spilling reg REGNO for non-groups,
1999 it will still be possible to find a group if we still need one. */
2000
2001static int
2002possible_group_p (regno, max_groups)
2003 int regno;
2004 int *max_groups;
2005{
2006 int i;
2007 int class = (int) NO_REGS;
2008
2009 for (i = 0; i < (int) N_REG_CLASSES; i++)
2010 if (max_groups[i] > 0)
2011 {
2012 class = i;
2013 break;
2014 }
2015
2016 if (class == (int) NO_REGS)
2017 return 1;
2018
2019 /* Consider each pair of consecutive registers. */
2020 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2021 {
2022 /* Ignore pairs that include reg REGNO. */
2023 if (i == regno || i + 1 == regno)
2024 continue;
2025
2026 /* Ignore pairs that are outside the class that needs the group.
2027 ??? Here we fail to handle the case where two different classes
2028 independently need groups. But this never happens with our
2029 current machine descriptions. */
2030 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2031 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2032 continue;
2033
2034 /* A pair of consecutive regs we can still spill does the trick. */
2035 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2036 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2037 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2038 return 1;
2039
2040 /* A pair of one already spilled and one we can spill does it
2041 provided the one already spilled is not otherwise reserved. */
2042 if (spill_reg_order[i] < 0
2043 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2044 && spill_reg_order[i + 1] >= 0
2045 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2046 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2047 return 1;
2048 if (spill_reg_order[i + 1] < 0
2049 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2050 && spill_reg_order[i] >= 0
2051 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2052 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2053 return 1;
2054 }
2055
2056 return 0;
2057}
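/* A rough illustration (hypothetical class layout): if the class that still
   needs a group is {0,1,2,3} and the only pairs not yet spilled or reserved
   are (1,2) and (2,3), then possible_group_p (2, max_groups) returns 0,
   warning the caller that taking reg 2 for a non-group need would make the
   remaining group impossible, while possible_group_p (0, max_groups)
   would return 1. */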
2058\f
2059/* Count any groups that can be formed from the registers recently spilled.
2060 This is done class by class, in order of ascending class number. */
2061
2062static void
2063count_possible_groups (group_size, group_mode, max_groups)
2064 int *group_size;
2065 enum machine_mode *group_mode;
2066 int *max_groups;
2067{
2068 int i;
2069 /* Now find all consecutive groups of spilled registers
2070 and mark each group off against the need for such groups.
2071 But don't count them against ordinary need, yet. */
2072
2073 for (i = 0; i < N_REG_CLASSES; i++)
2074 if (group_size[i] > 1)
2075 {
2076 HARD_REG_SET new;
2077 int j;
2078
2079 CLEAR_HARD_REG_SET (new);
2080
2081 /* Make a mask of all the regs that are spill regs in class I. */
2082 for (j = 0; j < n_spills; j++)
2083 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2084 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2085 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2086 spill_regs[j]))
2087 SET_HARD_REG_BIT (new, spill_regs[j]);
2088
2089 /* Find each consecutive group of them. */
2090 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
2091 if (TEST_HARD_REG_BIT (new, j)
2092 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
2093 /* Next line in case group-mode for this class
2094 demands an even-odd pair. */
2095 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2096 {
2097 int k;
2098 for (k = 1; k < group_size[i]; k++)
2099 if (! TEST_HARD_REG_BIT (new, j + k))
2100 break;
2101 if (k == group_size[i])
2102 {
2103 /* We found a group. Mark it off against this class's
2104 need for groups, and against each superclass too. */
2105 register enum reg_class *p;
2106 max_groups[i]--;
2107 p = reg_class_superclasses[i];
2108 while (*p != LIM_REG_CLASSES)
2109 max_groups[(int) *p++]--;
2110 /* Don't count these registers again. */
2111 for (k = 0; k < group_size[i]; k++)
2112 SET_HARD_REG_BIT (counted_for_groups, j + k);
2113 }
2114 /* Skip to the last reg in this group. When j is incremented
2115 above, it will then point to the first reg of the next
2116 possible group. */
2117 j += k - 1;
2118 }
2119 }
2120
2121}
2122\f
2123/* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2124 another mode that needs to be reloaded for the same register class CLASS.
2125 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2126 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2127
2128 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2129 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2130 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2131 causes unnecessary failures on machines requiring alignment of register
2132 groups when the two modes are different sizes, because the larger mode has
2133 more strict alignment rules than the smaller mode. */
2134
2135static int
2136modes_equiv_for_class_p (allocate_mode, other_mode, class)
2137 enum machine_mode allocate_mode, other_mode;
2138 enum reg_class class;
2139{
2140 register int regno;
2141 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2142 {
2143 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2144 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2145 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2146 return 0;
2147 }
2148 return 1;
2149}
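/* A worked (hypothetical) example: suppose CLASS is regs 0-3 and DImode is
   allowed only in even-numbered regs while SImode is allowed everywhere.
   Then modes_equiv_for_class_p (DImode, SImode, CLASS) returns 1, since
   every reg that can hold DImode can also hold SImode; the reverse test,
   which the comment above explains is no longer made, would have returned 0
   because regs 1 and 3 allow SImode but not DImode. */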
2150
2151/* Handle the failure to find a register to spill.
2152 INSN should be one of the insns which needed this particular spill reg. */
2153
2154static void
2155spill_failure (insn)
2156 rtx insn;
2157{
2158 if (asm_noperands (PATTERN (insn)) >= 0)
2159 error_for_asm (insn, "`asm' needs too many reloads");
2160 else
2161 abort ();
2162}
2163
2164/* Add a new register to the tables of available spill-registers
2165 (as well as spilling all pseudos allocated to the register).
2166 I is the index of this register in potential_reload_regs.
2167 CLASS is the regclass whose need is being satisfied.
2168 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2169 so that this register can count off against them.
2170 MAX_NONGROUPS is 0 if this register is part of a group.
2171 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2172
2173static int
2174new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2175 int i;
2176 int class;
2177 int *max_needs;
2178 int *max_nongroups;
2179 int global;
2180 FILE *dumpfile;
2181{
2182 register enum reg_class *p;
2183 int val;
2184 int regno = potential_reload_regs[i];
2185
2186 if (i >= FIRST_PSEUDO_REGISTER)
2187 abort (); /* Caller failed to find any register. */
2188
2189 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2190 fatal ("fixed or forbidden register was spilled.\n\
2191This may be due to a compiler bug or to impossible asm statements.");
2192
2193 /* Make reg REGNO an additional reload reg. */
2194
2195 potential_reload_regs[i] = -1;
2196 spill_regs[n_spills] = regno;
2197 spill_reg_order[regno] = n_spills;
2198 if (dumpfile)
2199 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2200
2201 /* Clear off the needs we just satisfied. */
2202
2203 max_needs[class]--;
2204 p = reg_class_superclasses[class];
2205 while (*p != LIM_REG_CLASSES)
2206 max_needs[(int) *p++]--;
2207
2208 if (max_nongroups && max_nongroups[class] > 0)
2209 {
2210 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2211 max_nongroups[class]--;
2212 p = reg_class_superclasses[class];
2213 while (*p != LIM_REG_CLASSES)
2214 max_nongroups[(int) *p++]--;
2215 }
2216
2217 /* Spill every pseudo reg that was allocated to this reg
2218 or to something that overlaps this reg. */
2219
2220 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2221
2222 /* If there are some registers still to eliminate and this register
2223 wasn't ever used before, additional stack space may have to be
2224 allocated to store this register. Thus, we may have changed the offset
2225 between the stack and frame pointers, so mark that something has changed.
2226 (If new pseudos were spilled, thus requiring more space, VAL would have
2227 been set non-zero by the call to spill_hard_reg above since additional
2228 reloads may be needed in that case.)
2229
2230 One might think that we need only set VAL to 1 if this is a call-used
2231 register. However, the set of registers that must be saved by the
2232 prologue is not identical to the call-used set. For example, the
2233 register used by the call insn for the return PC is a call-used register,
2234 but must be saved by the prologue. */
2235 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2236 val = 1;
2237
2238 regs_ever_live[spill_regs[n_spills]] = 1;
2239 n_spills++;
2240
2241 return val;
2242}
2243\f
2244/* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2245 data that is dead in INSN. */
2246
2247static void
2248delete_dead_insn (insn)
2249 rtx insn;
2250{
2251 rtx prev = prev_real_insn (insn);
2252 rtx prev_dest;
2253
2254 /* If the previous insn sets a register that dies in our insn, delete it
2255 too. */
2256 if (prev && GET_CODE (PATTERN (prev)) == SET
2257 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2258 && reg_mentioned_p (prev_dest, PATTERN (insn))
2259 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2260 delete_dead_insn (prev);
2261
2262 PUT_CODE (insn, NOTE);
2263 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2264 NOTE_SOURCE_FILE (insn) = 0;
2265}
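/* As an illustration: if INSN is (set (mem:SI (reg fp)) (reg 42)), carries a
   REG_DEAD note for (reg 42), and the previous real insn is
   (set (reg 42) (const_int 5)), then both insns are turned into
   NOTE_INSN_DELETED notes; the recursion disposes of the earlier insn first. */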
2266
2267/* Modify the home of pseudo-reg I.
2268 The new home is present in reg_renumber[I].
2269
2270 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2271 or it may be -1, meaning there is none or it is not relevant.
2272 This is used so that all pseudos spilled from a given hard reg
2273 can share one stack slot. */
2274
2275static void
2276alter_reg (i, from_reg)
2277 register int i;
2278 int from_reg;
2279{
2280 /* When outputting an inline function, this can happen
2281 for a reg that isn't actually used. */
2282 if (regno_reg_rtx[i] == 0)
2283 return;
2284
2285 /* If the reg got changed to a MEM at rtl-generation time,
2286 ignore it. */
2287 if (GET_CODE (regno_reg_rtx[i]) != REG)
2288 return;
2289
2290 /* Modify the reg-rtx to contain the new hard reg
2291 number or else to contain its pseudo reg number. */
2292 REGNO (regno_reg_rtx[i])
2293 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2294
2295 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2296 allocate a stack slot for it. */
2297
2298 if (reg_renumber[i] < 0
2299 && reg_n_refs[i] > 0
2300 && reg_equiv_constant[i] == 0
2301 && reg_equiv_memory_loc[i] == 0)
2302 {
2303 register rtx x;
2304 int inherent_size = PSEUDO_REGNO_BYTES (i);
2305 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2306 int adjust = 0;
2307
2308 /* Each pseudo reg has an inherent size which comes from its own mode,
2309 and a total size which provides room for paradoxical subregs
2310 which refer to the pseudo reg in wider modes.
2311
2312 We can use a slot already allocated if it provides both
2313 enough inherent space and enough total space.
2314 Otherwise, we allocate a new slot, making sure that it has no less
2315 inherent space, and no less total space, than the previous slot. */
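 /* A worked example with illustrative sizes: an SImode pseudo has an inherent
    size of 4 bytes; if it is also referenced through a paradoxical DImode
    subreg, reg_max_ref_width makes total_size 8. A slot recorded for FROM_REG
    can then be reused only if its recorded width is at least 8 and its mode
    is at least 4 bytes wide; otherwise a fresh slot at least that large is
    allocated. */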
2316 if (from_reg == -1)
2317 {
2318 /* No known place to spill from => no slot to reuse. */
2319 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2320#if BYTES_BIG_ENDIAN
2321 /* Cancel the big-endian correction done in assign_stack_local.
2322 Get the address of the beginning of the slot.
2323 This is so we can do a big-endian correction unconditionally
2324 below. */
2325 adjust = inherent_size - total_size;
2326#endif
2327 }
2328 /* Reuse a stack slot if possible. */
2329 else if (spill_stack_slot[from_reg] != 0
2330 && spill_stack_slot_width[from_reg] >= total_size
2331 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2332 >= inherent_size))
2333 x = spill_stack_slot[from_reg];
2334 /* Allocate a bigger slot. */
2335 else
2336 {
2337 /* Compute maximum size needed, both for inherent size
2338 and for total size. */
2339 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2340 if (spill_stack_slot[from_reg])
2341 {
2342 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2343 > inherent_size)
2344 mode = GET_MODE (spill_stack_slot[from_reg]);
2345 if (spill_stack_slot_width[from_reg] > total_size)
2346 total_size = spill_stack_slot_width[from_reg];
2347 }
2348 /* Make a slot with that size. */
2349 x = assign_stack_local (mode, total_size, -1);
2350#if BYTES_BIG_ENDIAN
2351 /* Cancel the big-endian correction done in assign_stack_local.
2352 Get the address of the beginning of the slot.
2353 This is so we can do a big-endian correction unconditionally
2354 below. */
2355 adjust = GET_MODE_SIZE (mode) - total_size;
2356#endif
2357 spill_stack_slot[from_reg] = x;
2358 spill_stack_slot_width[from_reg] = total_size;
2359 }
2360
2361#if BYTES_BIG_ENDIAN
2362 /* On a big endian machine, the "address" of the slot
2363 is the address of the low part that fits its inherent mode. */
2364 if (inherent_size < total_size)
2365 adjust += (total_size - inherent_size);
2366#endif /* BYTES_BIG_ENDIAN */
2367
2368 /* If we have any adjustment to make, or if the stack slot is the
2369 wrong mode, make a new stack slot. */
2370 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2371 {
2372 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2373 plus_constant (XEXP (x, 0), adjust));
2374 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2375 }
2376
2377 /* Save the stack slot for later. */
2378 reg_equiv_memory_loc[i] = x;
2379 }
2380}
2381
2382/* Mark the slots in regs_ever_live for the hard regs
2383 used by pseudo-reg number REGNO. */
2384
2385void
2386mark_home_live (regno)
2387 int regno;
2388{
2389 register int i, lim;
2390 i = reg_renumber[regno];
2391 if (i < 0)
2392 return;
2393 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2394 while (i < lim)
2395 regs_ever_live[i++] = 1;
2396}
2397\f
2398/* This function handles the tracking of elimination offsets around branches.
2399
2400 X is a piece of RTL being scanned.
2401
2402 INSN is the insn that it came from, if any.
2403
2404 INITIAL_P is non-zero if we are to set the offset to be the initial
2405 offset and zero if we are setting the offset of the label to be the
2406 current offset. */
2407
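/* Roughly, this means: if a label can be reached both along a path where an
   elimination's offset is, say, 8 and from a path where it is still at its
   initial offset 0, the two recorded values disagree and that elimination is
   suppressed, since no single offset would be correct at the label. */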
2408static void
2409set_label_offsets (x, insn, initial_p)
2410 rtx x;
2411 rtx insn;
2412 int initial_p;
2413{
2414 enum rtx_code code = GET_CODE (x);
2415 rtx tem;
2416 int i;
2417 struct elim_table *p;
2418
2419 switch (code)
2420 {
2421 case LABEL_REF:
2422 if (LABEL_REF_NONLOCAL_P (x))
2423 return;
2424
2425 x = XEXP (x, 0);
2426
2427 /* ... fall through ... */
2428
2429 case CODE_LABEL:
2430 /* If we know nothing about this label, set the desired offsets. Note
2431 that this sets the offset at a label to be the offset before a label
2432 if we don't know anything about the label. This is not correct for
2433 the label after a BARRIER, but is the best guess we can make. If
2434 we guessed wrong, we will suppress an elimination that might have
2435 been possible had we been able to guess correctly. */
2436
2437 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2438 {
2439 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2440 offsets_at[CODE_LABEL_NUMBER (x)][i]
2441 = (initial_p ? reg_eliminate[i].initial_offset
2442 : reg_eliminate[i].offset);
2443 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2444 }
2445
2446 /* Otherwise, if this is the definition of a label and it is
2447 preceded by a BARRIER, set our offsets to the known offset of
2448 that label. */
2449
2450 else if (x == insn
2451 && (tem = prev_nonnote_insn (insn)) != 0
2452 && GET_CODE (tem) == BARRIER)
2453 {
2454 num_not_at_initial_offset = 0;
2455 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2456 {
2457 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2458 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2459 if (reg_eliminate[i].can_eliminate
2460 && (reg_eliminate[i].offset
2461 != reg_eliminate[i].initial_offset))
2462 num_not_at_initial_offset++;
2463 }
2464 }
2465
2466 else
2467 /* If neither of the above cases is true, compare each offset
2468 with those previously recorded and suppress any eliminations
2469 where the offsets disagree. */
2470
2471 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2472 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2473 != (initial_p ? reg_eliminate[i].initial_offset
2474 : reg_eliminate[i].offset))
2475 reg_eliminate[i].can_eliminate = 0;
2476
2477 return;
2478
2479 case JUMP_INSN:
2480 set_label_offsets (PATTERN (insn), insn, initial_p);
2481
2482 /* ... fall through ... */
2483
2484 case INSN:
2485 case CALL_INSN:
2486 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2487 and hence must have all eliminations at their initial offsets. */
2488 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2489 if (REG_NOTE_KIND (tem) == REG_LABEL)
2490 set_label_offsets (XEXP (tem, 0), insn, 1);
2491 return;
2492
2493 case ADDR_VEC:
2494 case ADDR_DIFF_VEC:
2495 /* Each of the labels in the address vector must be at their initial
2496 offsets. We want the first field for ADDR_VEC and the second
2497 field for ADDR_DIFF_VEC. */
2498
2499 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2500 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2501 insn, initial_p);
2502 return;
2503
2504 case SET:
2505 /* We only care about setting PC. If the source is not RETURN,
2506 IF_THEN_ELSE, or a label, disable any eliminations not at
2507 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2508 isn't one of those possibilities. For branches to a label,
2509 call ourselves recursively.
2510
2511 Note that this can disable elimination unnecessarily when we have
2512 a non-local goto since it will look like a non-constant jump to
2513 someplace in the current function. This isn't a significant
2514 problem since such jumps will normally be when all elimination
2515 pairs are back to their initial offsets. */
2516
2517 if (SET_DEST (x) != pc_rtx)
2518 return;
2519
2520 switch (GET_CODE (SET_SRC (x)))
2521 {
2522 case PC:
2523 case RETURN:
2524 return;
2525
2526 case LABEL_REF:
2527 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2528 return;
2529
2530 case IF_THEN_ELSE:
2531 tem = XEXP (SET_SRC (x), 1);
2532 if (GET_CODE (tem) == LABEL_REF)
2533 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2534 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2535 break;
2536
2537 tem = XEXP (SET_SRC (x), 2);
2538 if (GET_CODE (tem) == LABEL_REF)
2539 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2540 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2541 break;
2542 return;
2543 }
2544
2545 /* If we reach here, all eliminations must be at their initial
2546 offset because we are doing a jump to a variable address. */
2547 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2548 if (p->offset != p->initial_offset)
2549 p->can_eliminate = 0;
2550 }
2551}
2552\f
2553/* Used for communication between the next two functions to properly share
2554 the vector for an ASM_OPERANDS. */
2555
2556static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2557
2558/* Scan X and replace any eliminable registers (such as fp) with a
2559 replacement (such as sp), plus an offset.
2560
2561 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2562 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2563 MEM, we are allowed to replace a sum of a register and the constant zero
2564 with the register, which we cannot do outside a MEM. In addition, we need
2565 to record the fact that a register is referenced outside a MEM.
2566
2567 If INSN is nonzero, it is the insn containing X. If we replace a REG
2568 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2569 CLOBBER of the pseudo after INSN so find_equiv_regs will know
2570 that the REG is being modified.
2571
2572 If we see a modification to a register we know about, take the
2573 appropriate action (see case SET, below).
2574
2575 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2576 replacements done assuming all offsets are at their initial values. If
2577 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2578 encounter, return the actual location so that find_reloads will do
2579 the proper thing. */
2580
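/* A usage sketch (offset illustrative): with a frame-pointer-to-stack-pointer
   elimination whose recorded offset is 8, eliminate_regs applied to
   (mem:SI (plus (reg fp) (const_int 4))) produces
   (mem:SI (plus (reg sp) (const_int 12))), the PLUS case below having folded
   the elimination offset into the displacement. */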
2581rtx
2582eliminate_regs (x, mem_mode, insn)
2583 rtx x;
2584 enum machine_mode mem_mode;
2585 rtx insn;
2586{
2587 enum rtx_code code = GET_CODE (x);
2588 struct elim_table *ep;
2589 int regno;
2590 rtx new;
2591 int i, j;
2592 char *fmt;
2593 int copied = 0;
2594
2595 switch (code)
2596 {
2597 case CONST_INT:
2598 case CONST_DOUBLE:
2599 case CONST:
2600 case SYMBOL_REF:
2601 case CODE_LABEL:
2602 case PC:
2603 case CC0:
2604 case ASM_INPUT:
2605 case ADDR_VEC:
2606 case ADDR_DIFF_VEC:
2607 case RETURN:
2608 return x;
2609
2610 case REG:
2611 regno = REGNO (x);
2612
2613 /* First handle the case where we encounter a bare register that
2614 is eliminable. Replace it with a PLUS. */
2615 if (regno < FIRST_PSEUDO_REGISTER)
2616 {
2617 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2618 ep++)
2619 if (ep->from_rtx == x && ep->can_eliminate)
2620 {
2621 if (! mem_mode)
2622 ep->ref_outside_mem = 1;
2623 return plus_constant (ep->to_rtx, ep->previous_offset);
2624 }
2625
2626 }
2627 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2628 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2629 {
2630 /* In this case, find_reloads would attempt to either use an
2631 incorrect address (if something is not at its initial offset)
2632 or substitute a replaced address into an insn (which loses
2633 if the offset is changed by some later action). So we simply
2634 return the replaced stack slot (assuming it is changed by
2635 elimination) and ignore the fact that this is actually a
2636 reference to the pseudo. Ensure we make a copy of the
2637 address in case it is shared. */
2638 new = eliminate_regs (reg_equiv_memory_loc[regno],
2639 mem_mode, NULL_RTX);
2640 if (new != reg_equiv_memory_loc[regno])
2641 {
2642 cannot_omit_stores[regno] = 1;
2643 return copy_rtx (new);
2644 }
2645 }
2646 return x;
2647
2648 case PLUS:
2649 /* If this is the sum of an eliminable register and a constant, rework
2650 the sum. */
2651 if (GET_CODE (XEXP (x, 0)) == REG
2652 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2653 && CONSTANT_P (XEXP (x, 1)))
2654 {
2655 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2656 ep++)
2657 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2658 {
2659 if (! mem_mode)
2660 ep->ref_outside_mem = 1;
2661
2662 /* The only time we want to replace a PLUS with a REG (this
2663 occurs when the constant operand of the PLUS is the negative
2664 of the offset) is when we are inside a MEM. We won't want
2665 to do so at other times because that would change the
2666 structure of the insn in a way that reload can't handle.
2667 We special-case the commonest situation in
2668 eliminate_regs_in_insn, so just replace a PLUS with a
2669 PLUS here, unless inside a MEM. */
2670 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2671 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2672 return ep->to_rtx;
2673 else
2674 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2675 plus_constant (XEXP (x, 1),
2676 ep->previous_offset));
2677 }
2678
2679 /* If the register is not eliminable, we are done since the other
2680 operand is a constant. */
2681 return x;
2682 }
2683
2684 /* If this is part of an address, we want to bring any constant to the
2685 outermost PLUS. We will do this by doing register replacement in
2686 our operands and seeing if a constant shows up in one of them.
2687
2688 We assume here this is part of an address (or a "load address" insn)
2689 since an eliminable register is not likely to appear in any other
2690 context.
2691
2692 If we have (plus (eliminable) (reg)), we want to produce
2693 (plus (plus (replacement) (reg)) (const)). If this was part of a
2694 normal add insn, (plus (replacement) (reg)) will be pushed as a
2695 reload. This is the desired action. */
2696
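 /* Concretely (offset illustrative): when eliminating fp in favor of sp at
    offset 12, (plus (reg fp) (reg 100)) gives new0 = (plus (reg sp)
    (const_int 12)) and new1 = (reg 100), and form_sum pushes the constant
    outermost: (plus (plus (reg sp) (reg 100)) (const_int 12)). */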
2697 {
2698 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2699 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
2700
2701 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2702 {
2703 /* If one side is a PLUS and the other side is a pseudo that
2704 didn't get a hard register but has a reg_equiv_constant,
2705 we must replace the constant here since it may no longer
2706 be in the position of any operand. */
2707 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2708 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2709 && reg_renumber[REGNO (new1)] < 0
2710 && reg_equiv_constant != 0
2711 && reg_equiv_constant[REGNO (new1)] != 0)
2712 new1 = reg_equiv_constant[REGNO (new1)];
2713 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2714 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2715 && reg_renumber[REGNO (new0)] < 0
2716 && reg_equiv_constant[REGNO (new0)] != 0)
2717 new0 = reg_equiv_constant[REGNO (new0)];
2718
2719 new = form_sum (new0, new1);
2720
2721 /* As above, if we are not inside a MEM we do not want to
2722 turn a PLUS into something else. We might try to do so here
2723 for an addition of 0 if we aren't optimizing. */
2724 if (! mem_mode && GET_CODE (new) != PLUS)
2725 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2726 else
2727 return new;
2728 }
2729 }
2730 return x;
2731
2732 case EXPR_LIST:
2733 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2734 if (XEXP (x, 0))
2735 {
2736 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2737 if (new != XEXP (x, 0))
2738 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2739 }
2740
2741 /* ... fall through ... */
2742
2743 case INSN_LIST:
2744 /* Now do eliminations in the rest of the chain. If this was
2745 an EXPR_LIST, this might result in allocating more memory than is
2746 strictly needed, but it simplifies the code. */
2747 if (XEXP (x, 1))
2748 {
2749 new = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
2750 if (new != XEXP (x, 1))
2751 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2752 }
2753 return x;
2754
2755 case CALL:
2756 case COMPARE:
2757 case MINUS:
2758 case MULT:
2759 case DIV: case UDIV:
2760 case MOD: case UMOD:
2761 case AND: case IOR: case XOR:
2762 case LSHIFT: case ASHIFT: case ROTATE:
2763 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2764 case NE: case EQ:
2765 case GE: case GT: case GEU: case GTU:
2766 case LE: case LT: case LEU: case LTU:
2767 {
2768 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2769 rtx new1
2770 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX) : 0;
2771
2772 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2773 return gen_rtx (code, GET_MODE (x), new0, new1);
2774 }
2775 return x;
2776
2777 case PRE_INC:
2778 case POST_INC:
2779 case PRE_DEC:
2780 case POST_DEC:
2781 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2782 if (ep->to_rtx == XEXP (x, 0))
2783 {
2784 int size = GET_MODE_SIZE (mem_mode);
2785
2786 /* If more bytes than MEM_MODE are pushed, account for them. */
2787#ifdef PUSH_ROUNDING
2788 if (ep->to_rtx == stack_pointer_rtx)
2789 size = PUSH_ROUNDING (size);
2790#endif
2791 if (code == PRE_DEC || code == POST_DEC)
2792 ep->offset += size;
2793 else
2794 ep->offset -= size;
2795 }
2796
2797 /* Fall through to generic unary operation case. */
2798 case USE:
2799 case STRICT_LOW_PART:
2800 case NEG: case NOT:
2801 case SIGN_EXTEND: case ZERO_EXTEND:
2802 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2803 case FLOAT: case FIX:
2804 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2805 case ABS:
2806 case SQRT:
2807 case FFS:
2808 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2809 if (new != XEXP (x, 0))
2810 return gen_rtx (code, GET_MODE (x), new);
2811 return x;
2812
2813 case SUBREG:
2814 /* Similar to above processing, but preserve SUBREG_WORD.
2815 Convert (subreg (mem)) to (mem) if not paradoxical.
2816 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2817 pseudo didn't get a hard reg, we must replace this with the
2818 eliminated version of the memory location because push_reloads
2819 may do the replacement in certain circumstances. */
2820 if (GET_CODE (SUBREG_REG (x)) == REG
2821 && (GET_MODE_SIZE (GET_MODE (x))
2822 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2823 && reg_equiv_memory_loc != 0
2824 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2825 {
2826 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2827 mem_mode, NULL_RTX);
2828
2829 /* If we didn't change anything, we must retain the pseudo. */
2830 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2831 new = XEXP (x, 0);
2832 else
2833 /* Otherwise, ensure NEW isn't shared in case we have to reload
2834 it. */
2835 new = copy_rtx (new);
2836 }
2837 else
2838 new = eliminate_regs (SUBREG_REG (x), mem_mode, NULL_RTX);
2839
2840 if (new != XEXP (x, 0))
2841 {
2842 if (GET_CODE (new) == MEM
2843 && (GET_MODE_SIZE (GET_MODE (x))
2844 <= GET_MODE_SIZE (GET_MODE (new)))
2845#if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
2846 /* On these machines we will be reloading what is
2847 inside the SUBREG if it originally was a pseudo and
2848 the inner and outer modes are both a word or
2849 smaller. So leave the SUBREG then. */
2850 && ! (GET_CODE (SUBREG_REG (x)) == REG
2851 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2852 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2853#endif
2854 )
2855 {
2856 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2857 enum machine_mode mode = GET_MODE (x);
2858
2859#if BYTES_BIG_ENDIAN
2860 offset += (MIN (UNITS_PER_WORD,
2861 GET_MODE_SIZE (GET_MODE (new)))
2862 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2863#endif
2864
2865 PUT_MODE (new, mode);
2866 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2867 return new;
2868 }
2869 else
2870 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2871 }
2872
2873 return x;
2874
2875 case CLOBBER:
2876 /* If clobbering a register that is the replacement register for an
2877 elimination we still think can be performed, note that it cannot
2878 be performed. Otherwise, we need not be concerned about it. */
2879 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2880 if (ep->to_rtx == XEXP (x, 0))
2881 ep->can_eliminate = 0;
2882
2883 new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
2884 if (new != XEXP (x, 0))
2885 return gen_rtx (code, GET_MODE (x), new);
2886 return x;
2887
2888 case ASM_OPERANDS:
2889 {
2890 rtx *temp_vec;
2891 /* Properly handle sharing input and constraint vectors. */
2892 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2893 {
2894 /* When we come to a new vector not seen before,
2895 scan all its elements; keep the old vector if none
2896 of them changes; otherwise, make a copy. */
2897 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2898 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2899 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2900 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2901 mem_mode, NULL_RTX);
2902
2903 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2904 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2905 break;
2906
2907 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2908 new_asm_operands_vec = old_asm_operands_vec;
2909 else
2910 new_asm_operands_vec
2911 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2912 }
2913
2914 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2915 if (new_asm_operands_vec == old_asm_operands_vec)
2916 return x;
2917
2918 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2919 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2920 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2921 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2922 ASM_OPERANDS_SOURCE_FILE (x),
2923 ASM_OPERANDS_SOURCE_LINE (x));
2924 new->volatil = x->volatil;
2925 return new;
2926 }
2927
2928 case SET:
2929 /* Check for setting a register that we know about. */
2930 if (GET_CODE (SET_DEST (x)) == REG)
2931 {
2932 /* See if this is setting the replacement register for an
2933 elimination.
2934
2935 If DEST is the frame pointer, we do nothing because we assume that
2936 all assignments to the frame pointer are for non-local gotos and
2937 are being done at a time when they are valid and do not disturb
2938 anything else. Some machines want to eliminate a fake argument
2939 pointer with either the frame or stack pointer. Assignments to
2940 the frame pointer must not prevent this elimination. */
2941
2942 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2943 ep++)
2944 if (ep->to_rtx == SET_DEST (x)
2945 && SET_DEST (x) != frame_pointer_rtx)
2946 {
2947 /* If it is being incremented, adjust the offset. Otherwise,
2948 this elimination can't be done. */
2949 rtx src = SET_SRC (x);
2950
2951 if (GET_CODE (src) == PLUS
2952 && XEXP (src, 0) == SET_DEST (x)
2953 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2954 ep->offset -= INTVAL (XEXP (src, 1));
2955 else
2956 ep->can_eliminate = 0;
2957 }
2958
2959 /* Now check to see if we are assigning to a register that can be
2960 eliminated. If so, it must be as part of a PARALLEL, since we
2961 will not have been called if this is a single SET. So indicate
2962 that we can no longer eliminate this reg. */
2963 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2964 ep++)
2965 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
2966 ep->can_eliminate = 0;
2967 }
2968
2969 /* Now avoid the loop below in this common case. */
2970 {
2971 rtx new0 = eliminate_regs (SET_DEST (x), 0, NULL_RTX);
2972 rtx new1 = eliminate_regs (SET_SRC (x), 0, NULL_RTX);
2973
2974 /* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
2975 write a CLOBBER insn. */
2976 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
2977 && insn != 0)
2978 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
2979
2980 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
2981 return gen_rtx (SET, VOIDmode, new0, new1);
2982 }
2983
2984 return x;
2985
2986 case MEM:
2987 /* Our only special processing is to pass the mode of the MEM to our
2988 recursive call and copy the flags. While we are here, handle this
2989 case more efficiently. */
2990 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), NULL_RTX);
2991 if (new != XEXP (x, 0))
2992 {
2993 new = gen_rtx (MEM, GET_MODE (x), new);
2994 new->volatil = x->volatil;
2995 new->unchanging = x->unchanging;
2996 new->in_struct = x->in_struct;
2997 return new;
2998 }
2999 else
3000 return x;
3001 }
3002
3003 /* Process each of our operands recursively. If any have changed, make a
3004 copy of the rtx. */
3005 fmt = GET_RTX_FORMAT (code);
3006 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3007 {
3008 if (*fmt == 'e')
3009 {
3010 new = eliminate_regs (XEXP (x, i), mem_mode, NULL_RTX);
3011 if (new != XEXP (x, i) && ! copied)
3012 {
3013 rtx new_x = rtx_alloc (code);
3014 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3015 + (sizeof (new_x->fld[0])
3016 * GET_RTX_LENGTH (code))));
3017 x = new_x;
3018 copied = 1;
3019 }
3020 XEXP (x, i) = new;
3021 }
3022 else if (*fmt == 'E')
3023 {
3024 int copied_vec = 0;
3025 for (j = 0; j < XVECLEN (x, i); j++)
3026 {
3027 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3028 if (new != XVECEXP (x, i, j) && ! copied_vec)
3029 {
3030 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3031 &XVECEXP (x, i, 0));
3032 if (! copied)
3033 {
3034 rtx new_x = rtx_alloc (code);
3035 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3036 + (sizeof (new_x->fld[0])
3037 * GET_RTX_LENGTH (code))));
3038 x = new_x;
3039 copied = 1;
3040 }
3041 XVEC (x, i) = new_v;
3042 copied_vec = 1;
3043 }
3044 XVECEXP (x, i, j) = new;
3045 }
3046 }
3047 }
3048
3049 return x;
3050}
3051\f
3052/* Scan INSN and eliminate all eliminable registers in it.
3053
3054 If REPLACE is nonzero, do the replacement destructively. Also
3055 delete the insn as dead if it is setting an eliminable register.
3056
3057 If REPLACE is zero, do all our allocations in reload_obstack.
3058
3059 If no eliminations were done and this insn doesn't require any elimination
3060 processing (these are not identical conditions: it might be updating sp,
3061 but not referencing fp; this needs to be seen during reload_as_needed so
3062 that the offset between fp and sp can be taken into consideration), zero
3063 is returned. Otherwise, 1 is returned. */
3064
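/* An illustrative case (offset hypothetical): with a frame-pointer elimination
   to the stack pointer whose current offset is 8, the insn
   (set (reg 3) (plus (reg fp) (const_int -8))) is rewritten below directly as
   (set (reg 3) (reg sp)) and its insn code is reset so it will be
   re-recognized. */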
3065static int
3066eliminate_regs_in_insn (insn, replace)
3067 rtx insn;
3068 int replace;
3069{
3070 rtx old_body = PATTERN (insn);
3071 rtx new_body;
3072 int val = 0;
3073 struct elim_table *ep;
3074
3075 if (! replace)
3076 push_obstacks (&reload_obstack, &reload_obstack);
3077
3078 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3079 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3080 {
3081 /* Check for setting an eliminable register. */
3082 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3083 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3084 {
3085 /* In this case this insn isn't serving a useful purpose. We
3086 will delete it in reload_as_needed once we know that this
3087 elimination is, in fact, being done.
3088
3089 If REPLACE isn't set, we can't delete this insn, but needn't
3090 process it since it won't be used unless something changes. */
3091 if (replace)
3092 delete_dead_insn (insn);
3093 val = 1;
3094 goto done;
3095 }
3096
3097 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3098 in the insn is the negative of the offset in FROM. Substitute
3099 (set (reg) (reg to)) for the insn and change its code.
3100
3101 We have to do this here, rather than in eliminate_regs, so that we can
3102 change the insn code. */
3103
3104 if (GET_CODE (SET_SRC (old_body)) == PLUS
3105 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3106 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3107 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3108 ep++)
3109 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
3110 && ep->can_eliminate)
3111 {
3112 /* We must stop at the first elimination that will be used.
3113 If this one would replace the PLUS with a REG, do it
3114 now. Otherwise, quit the loop and let eliminate_regs
3115 do its normal replacement. */
3116 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3117 {
3118 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3119 SET_DEST (old_body), ep->to_rtx);
3120 INSN_CODE (insn) = -1;
3121 val = 1;
3122 goto done;
3123 }
3124
3125 break;
3126 }
3127 }
3128
3129 old_asm_operands_vec = 0;
3130
3131 /* Replace the body of this insn with a substituted form. If we changed
3132 something, return non-zero. If this is the final call for this
3133 insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.
3134
3135 If we are replacing a body that was a (set X (plus Y Z)), try to
3136 re-recognize the insn. We do this in case we had a simple addition
3137 but now can do this as a load-address. This saves an insn in this
3138 common case. */
3139
3140 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3141 if (new_body != old_body)
3142 {
3143 /* If we aren't replacing things permanently and we changed something,
3144 make another copy to ensure that all the RTL is new. Otherwise
3145 things can go wrong if find_reloads swaps commutative operands
3146 and one is inside RTL that has been copied while the other is not. */
3147
3148 /* Don't copy an asm_operands because (1) there's no need and (2)
3149 copy_rtx can't do it properly when there are multiple outputs. */
3150 if (! replace && asm_noperands (old_body) < 0)
3151 new_body = copy_rtx (new_body);
3152
3153 /* If we had a move insn but now we don't, rerecognize it. */
3154 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3155 && (GET_CODE (new_body) != SET
3156 || GET_CODE (SET_SRC (new_body)) != REG))
3157 /* If this was an add insn before, rerecognize. */
3158 ||
3159 (GET_CODE (old_body) == SET
3160 && GET_CODE (SET_SRC (old_body)) == PLUS))
3161 {
3162 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3163 /* If recognition fails, store the new body anyway.
3164 It's normal to have recognition failures here
3165 due to bizarre memory addresses; reloading will fix them. */
3166 PATTERN (insn) = new_body;
3167 }
3168 else
3169 PATTERN (insn) = new_body;
3170
3171 if (replace && REG_NOTES (insn))
3172 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, NULL_RTX);
3173 val = 1;
3174 }
3175
3176 /* Loop through all elimination pairs. See if any have changed and
3177 recalculate the number not at initial offset.
3178
3179 Compute the maximum offset (minimum offset if the stack does not
3180 grow downward) for each elimination pair.
3181
 3182	 We also detect a case where register elimination cannot be done,
3183 namely, if a register would be both changed and referenced outside a MEM
3184 in the resulting insn since such an insn is often undefined and, even if
3185 not, we cannot know what meaning will be given to it. Note that it is
3186 valid to have a register used in an address in an insn that changes it
3187 (presumably with a pre- or post-increment or decrement).
3188
3189 If anything changes, return nonzero. */
3190
3191 num_not_at_initial_offset = 0;
3192 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3193 {
3194 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3195 ep->can_eliminate = 0;
3196
3197 ep->ref_outside_mem = 0;
3198
3199 if (ep->previous_offset != ep->offset)
3200 val = 1;
3201
3202 ep->previous_offset = ep->offset;
3203 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3204 num_not_at_initial_offset++;
3205
3206#ifdef STACK_GROWS_DOWNWARD
3207 ep->max_offset = MAX (ep->max_offset, ep->offset);
3208#else
3209 ep->max_offset = MIN (ep->max_offset, ep->offset);
3210#endif
3211 }
3212
3213 done:
3214 if (! replace)
3215 pop_obstacks ();
3216
3217 return val;
3218}
3219
3220/* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3221 replacement we currently believe is valid, mark it as not eliminable if X
3222 modifies DEST in any way other than by adding a constant integer to it.
3223
3224 If DEST is the frame pointer, we do nothing because we assume that
3225 all assignments to the frame pointer are nonlocal gotos and are being done
3226 at a time when they are valid and do not disturb anything else.
3227 Some machines want to eliminate a fake argument pointer with either the
3228 frame or stack pointer. Assignments to the frame pointer must not prevent
3229 this elimination.
3230
3231 Called via note_stores from reload before starting its passes to scan
3232 the insns of the function. */
3233
3234static void
3235mark_not_eliminable (dest, x)
3236 rtx dest;
3237 rtx x;
3238{
3239 register int i;
3240
3241 /* A SUBREG of a hard register here is just changing its mode. We should
3242 not see a SUBREG of an eliminable hard register, but check just in
3243 case. */
3244 if (GET_CODE (dest) == SUBREG)
3245 dest = SUBREG_REG (dest);
3246
3247 if (dest == frame_pointer_rtx)
3248 return;
3249
3250 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3251 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3252 && (GET_CODE (x) != SET
3253 || GET_CODE (SET_SRC (x)) != PLUS
3254 || XEXP (SET_SRC (x), 0) != dest
3255 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3256 {
3257 reg_eliminate[i].can_eliminate_previous
3258 = reg_eliminate[i].can_eliminate = 0;
3259 num_eliminable--;
3260 }
3261}
3262\f
3263/* Kick all pseudos out of hard register REGNO.
3264 If GLOBAL is nonzero, try to find someplace else to put them.
3265 If DUMPFILE is nonzero, log actions taken on that file.
3266
3267 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
 3268	 because we found we can't eliminate some register.  In this case, no pseudos
3269 are allowed to be in the register, even if they are only in a block that
3270 doesn't require spill registers, unlike the case when we are spilling this
3271 hard reg to produce another spill register.
3272
3273 Return nonzero if any pseudos needed to be kicked out. */
3274
3275static int
3276spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3277 register int regno;
3278 int global;
3279 FILE *dumpfile;
3280 int cant_eliminate;
3281{
3282 int something_changed = 0;
3283 register int i;
3284
3285 SET_HARD_REG_BIT (forbidden_regs, regno);
3286
3287 /* Spill every pseudo reg that was allocated to this reg
3288 or to something that overlaps this reg. */
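  /* A pseudo allocated to hard reg R and occupying N hard regs overlaps
     REGNO exactly when R <= REGNO < R + N; that is the test made in the
     condition below.  */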
3289
3290 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3291 if (reg_renumber[i] >= 0
3292 && reg_renumber[i] <= regno
3293 && (reg_renumber[i]
3294 + HARD_REGNO_NREGS (reg_renumber[i],
3295 PSEUDO_REGNO_MODE (i))
3296 > regno))
3297 {
3298 enum reg_class class = REGNO_REG_CLASS (regno);
3299
3300 /* If this register belongs solely to a basic block which needed no
3301 spilling of any class that this register is contained in,
3302 leave it be, unless we are spilling this register because
3303 it was a hard register that can't be eliminated. */
3304
3305 if (! cant_eliminate
3306 && basic_block_needs[0]
3307 && reg_basic_block[i] >= 0
3308 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3309 {
3310 enum reg_class *p;
3311
3312 for (p = reg_class_superclasses[(int) class];
3313 *p != LIM_REG_CLASSES; p++)
3314 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3315 break;
3316
3317 if (*p == LIM_REG_CLASSES)
3318 continue;
3319 }
3320
3321 /* Mark it as no longer having a hard register home. */
3322 reg_renumber[i] = -1;
3323 /* We will need to scan everything again. */
3324 something_changed = 1;
3325 if (global)
3326 retry_global_alloc (i, forbidden_regs);
3327
3328 alter_reg (i, regno);
3329 if (dumpfile)
3330 {
3331 if (reg_renumber[i] == -1)
3332 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3333 else
3334 fprintf (dumpfile, " Register %d now in %d.\n\n",
3335 i, reg_renumber[i]);
3336 }
3337 }
3338
3339 return something_changed;
3340}
3341\f
3342/* Find all paradoxical subregs within X and update reg_max_ref_width. */
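/* (A "paradoxical" subreg is one whose mode is wider than the mode of the
   register it contains, so the reference needs more bytes than the inner
   register's natural size; we record the widest such reference.)  */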
3343
3344static void
3345scan_paradoxical_subregs (x)
3346 register rtx x;
3347{
3348 register int i;
3349 register char *fmt;
3350 register enum rtx_code code = GET_CODE (x);
3351
3352 switch (code)
3353 {
3354 case CONST_INT:
3355 case CONST:
3356 case SYMBOL_REF:
3357 case LABEL_REF:
3358 case CONST_DOUBLE:
3359 case CC0:
3360 case PC:
3361 case REG:
3362 case USE:
3363 case CLOBBER:
3364 return;
3365
3366 case SUBREG:
3367 if (GET_CODE (SUBREG_REG (x)) == REG
3368 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3369 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3370 = GET_MODE_SIZE (GET_MODE (x));
3371 return;
3372 }
3373
3374 fmt = GET_RTX_FORMAT (code);
3375 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3376 {
3377 if (fmt[i] == 'e')
3378 scan_paradoxical_subregs (XEXP (x, i));
3379 else if (fmt[i] == 'E')
3380 {
3381 register int j;
3382 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3383 scan_paradoxical_subregs (XVECEXP (x, i, j));
3384 }
3385 }
3386}
3387\f
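/* Comparison function for qsort: order hard regs by increasing use count,
   breaking ties by register number so the result is deterministic.  */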
3388static int
3389hard_reg_use_compare (p1, p2)
3390 struct hard_reg_n_uses *p1, *p2;
3391{
3392 int tem = p1->uses - p2->uses;
3393 if (tem != 0) return tem;
3394 /* If regs are equally good, sort by regno,
3395 so that the results of qsort leave nothing to chance. */
3396 return p1->regno - p2->regno;
3397}
3398
3399/* Choose the order to consider regs for use as reload registers
3400 based on how much trouble would be caused by spilling one.
3401 Store them in order of decreasing preference in potential_reload_regs. */
3402
3403static void
3404order_regs_for_reload ()
3405{
3406 register int i;
3407 register int o = 0;
3408 int large = 0;
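  /* LARGE will end up holding the total reference count of all pseudo regs;
     adding it to a hard reg's use count below is enough to make that reg
     sort after every normally-used register.  */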
3409
3410 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3411
3412 CLEAR_HARD_REG_SET (bad_spill_regs);
3413
3414 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3415 potential_reload_regs[i] = -1;
3416
3417 /* Count number of uses of each hard reg by pseudo regs allocated to it
3418 and then order them by decreasing use. */
3419
3420 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3421 {
3422 hard_reg_n_uses[i].uses = 0;
3423 hard_reg_n_uses[i].regno = i;
3424 }
3425
3426 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3427 {
3428 int regno = reg_renumber[i];
3429 if (regno >= 0)
3430 {
3431 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3432 while (regno < lim)
3433 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3434 }
3435 large += reg_n_refs[i];
3436 }
3437
3438 /* Now fixed registers (which cannot safely be used for reloading)
3439 get a very high use count so they will be considered least desirable.
3440 Registers used explicitly in the rtl code are almost as bad. */
3441
3442 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3443 {
3444 if (fixed_regs[i])
3445 {
3446 hard_reg_n_uses[i].uses += 2 * large + 2;
3447 SET_HARD_REG_BIT (bad_spill_regs, i);
3448 }
3449 else if (regs_explicitly_used[i])
3450 {
3451 hard_reg_n_uses[i].uses += large + 1;
3452#ifndef SMALL_REGISTER_CLASSES
3453 /* ??? We are doing this here because of the potential that
3454 bad code may be generated if a register explicitly used in
3455 an insn was used as a spill register for that insn. But
 3456	     not using these as spill registers may lose on some machines.
3457 We'll have to see how this works out. */
3458 SET_HARD_REG_BIT (bad_spill_regs, i);
3459#endif
3460 }
3461 }
3462 hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3463 SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);
3464
3465#ifdef ELIMINABLE_REGS
3466 /* If registers other than the frame pointer are eliminable, mark them as
3467 poor choices. */
3468 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3469 {
3470 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3471 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3472 }
3473#endif
3474
3475 /* Prefer registers not so far used, for use in temporary loading.
3476 Among them, if REG_ALLOC_ORDER is defined, use that order.
3477 Otherwise, prefer registers not preserved by calls. */
3478
3479#ifdef REG_ALLOC_ORDER
3480 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3481 {
3482 int regno = reg_alloc_order[i];
3483
3484 if (hard_reg_n_uses[regno].uses == 0)
3485 potential_reload_regs[o++] = regno;
3486 }
3487#else
3488 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3489 {
3490 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3491 potential_reload_regs[o++] = i;
3492 }
3493 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3494 {
3495 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3496 potential_reload_regs[o++] = i;
3497 }
3498#endif
3499
3500 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3501 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3502
3503 /* Now add the regs that are already used,
3504 preferring those used less often. The fixed and otherwise forbidden
3505 registers will be at the end of this list. */
3506
3507 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3508 if (hard_reg_n_uses[i].uses != 0)
3509 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3510}
3511\f
3512/* Reload pseudo-registers into hard regs around each insn as needed.
3513 Additional register load insns are output before the insn that needs it
3514 and perhaps store insns after insns that modify the reloaded pseudo reg.
3515
3516 reg_last_reload_reg and reg_reloaded_contents keep track of
3517 which registers are already available in reload registers.
3518 We update these for the reloads that we perform,
3519 as the insns are scanned. */
3520
3521static void
3522reload_as_needed (first, live_known)
3523 rtx first;
3524 int live_known;
3525{
3526 register rtx insn;
3527 register int i;
3528 int this_block = 0;
3529 rtx x;
3530 rtx after_call = 0;
3531
3532 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3533 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3534 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3535 reg_has_output_reload = (char *) alloca (max_regno);
3536 for (i = 0; i < n_spills; i++)
3537 {
3538 reg_reloaded_contents[i] = -1;
3539 reg_reloaded_insn[i] = 0;
3540 }
3541
3542 /* Reset all offsets on eliminable registers to their initial values. */
3543#ifdef ELIMINABLE_REGS
3544 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3545 {
3546 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3547 reg_eliminate[i].initial_offset);
3548 reg_eliminate[i].previous_offset
3549 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3550 }
3551#else
3552 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3553 reg_eliminate[0].previous_offset
3554 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3555#endif
3556
3557 num_not_at_initial_offset = 0;
3558
3559 for (insn = first; insn;)
3560 {
3561 register rtx next = NEXT_INSN (insn);
3562
3563 /* Notice when we move to a new basic block. */
3564 if (live_known && this_block + 1 < n_basic_blocks
3565 && insn == basic_block_head[this_block+1])
3566 ++this_block;
3567
3568 /* If we pass a label, copy the offsets from the label information
3569 into the current offsets of each elimination. */
3570 if (GET_CODE (insn) == CODE_LABEL)
3571 {
3572 num_not_at_initial_offset = 0;
3573 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3574 {
3575 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3576 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3577 if (reg_eliminate[i].can_eliminate
3578 && (reg_eliminate[i].offset
3579 != reg_eliminate[i].initial_offset))
3580 num_not_at_initial_offset++;
3581 }
3582 }
3583
3584 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3585 {
3586 rtx avoid_return_reg = 0;
3587
3588#ifdef SMALL_REGISTER_CLASSES
3589 /* Set avoid_return_reg if this is an insn
3590 that might use the value of a function call. */
3591 if (GET_CODE (insn) == CALL_INSN)
3592 {
3593 if (GET_CODE (PATTERN (insn)) == SET)
3594 after_call = SET_DEST (PATTERN (insn));
3595 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3596 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3597 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3598 else
3599 after_call = 0;
3600 }
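	  /* An insn that merely sets the stack pointer (presumably the stack
	     adjustment following the call) neither uses nor invalidates the
	     call value, so AFTER_CALL is left intact for it.  */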
3601 else if (after_call != 0
3602 && !(GET_CODE (PATTERN (insn)) == SET
3603 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3604 {
3605 if (reg_mentioned_p (after_call, PATTERN (insn)))
3606 avoid_return_reg = after_call;
3607 after_call = 0;
3608 }
3609#endif /* SMALL_REGISTER_CLASSES */
3610
 3611	  /* If this is a USE or CLOBBER of a MEM, ensure that any
3612 references to eliminable registers have been removed. */
3613
3614 if ((GET_CODE (PATTERN (insn)) == USE
3615 || GET_CODE (PATTERN (insn)) == CLOBBER)
3616 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3617 XEXP (XEXP (PATTERN (insn), 0), 0)
3618 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3619 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3620
3621 /* If we need to do register elimination processing, do so.
3622 This might delete the insn, in which case we are done. */
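	  /* (The mode of INSN is used here as a flag recorded earlier in the
	     reload pass: QImode marks insns that need elimination processing,
	     and VOIDmode, tested below, marks insns for which the earlier
	     scan found no reloads.)  */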
3623 if (num_eliminable && GET_MODE (insn) == QImode)
3624 {
3625 eliminate_regs_in_insn (insn, 1);
3626 if (GET_CODE (insn) == NOTE)
3627 {
3628 insn = next;
3629 continue;
3630 }
3631 }
3632
3633 if (GET_MODE (insn) == VOIDmode)
3634 n_reloads = 0;
3635 /* First find the pseudo regs that must be reloaded for this insn.
3636 This info is returned in the tables reload_... (see reload.h).
3637 Also modify the body of INSN by substituting RELOAD
3638 rtx's for those pseudo regs. */
3639 else
3640 {
3641 bzero (reg_has_output_reload, max_regno);
3642 CLEAR_HARD_REG_SET (reg_is_output_reload);
3643
3644 find_reloads (insn, 1, spill_indirect_levels, live_known,
3645 spill_reg_order);
3646 }
3647
3648 if (n_reloads > 0)
3649 {
3650 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3651 rtx p;
3652 int class;
3653
3654 /* If this block has not had spilling done for a
 3655	     particular class and we have any non-optionals that need a
3656 spill reg in that class, abort. */
3657
3658 for (class = 0; class < N_REG_CLASSES; class++)
3659 if (basic_block_needs[class] != 0
3660 && basic_block_needs[class][this_block] == 0)
3661 for (i = 0; i < n_reloads; i++)
3662 if (class == (int) reload_reg_class[i]
3663 && reload_reg_rtx[i] == 0
3664 && ! reload_optional[i]
3665 && (reload_in[i] != 0 || reload_out[i] != 0
3666 || reload_secondary_p[i] != 0))
3667 abort ();
3668
3669 /* Now compute which reload regs to reload them into. Perhaps
3670 reusing reload regs from previous insns, or else output
3671 load insns to reload them. Maybe output store insns too.
3672 Record the choices of reload reg in reload_reg_rtx. */
3673 choose_reload_regs (insn, avoid_return_reg);
3674
3675#ifdef SMALL_REGISTER_CLASSES
3676 /* Merge any reloads that we didn't combine for fear of
3677 increasing the number of spill registers needed but now
3678 discover can be safely merged. */
3679 merge_assigned_reloads (insn);
3680#endif
3681
3682 /* Generate the insns to reload operands into or out of
3683 their reload regs. */
3684 emit_reload_insns (insn);
3685
3686 /* Substitute the chosen reload regs from reload_reg_rtx
3687 into the insn's body (or perhaps into the bodies of other
 3688	     load and store insns that we just made for reloading
3689 and that we moved the structure into). */
3690 subst_reloads ();
3691
3692 /* If this was an ASM, make sure that all the reload insns
3693 we have generated are valid. If not, give an error
3694 and delete them. */
3695
3696 if (asm_noperands (PATTERN (insn)) >= 0)
3697 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3698 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3699 && (recog_memoized (p) < 0
3700 || (insn_extract (p),
3701 ! constrain_operands (INSN_CODE (p), 1))))
3702 {
3703 error_for_asm (insn,
3704 "`asm' operand requires impossible reload");
3705 PUT_CODE (p, NOTE);
3706 NOTE_SOURCE_FILE (p) = 0;
3707 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3708 }
3709 }
3710 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3711 is no longer validly lying around to save a future reload.
3712 Note that this does not detect pseudos that were reloaded
3713 for this insn in order to be stored in
3714 (obeying register constraints). That is correct; such reload
3715 registers ARE still valid. */
3716 note_stores (PATTERN (insn), forget_old_reloads_1);
3717
3718 /* There may have been CLOBBER insns placed after INSN. So scan
3719 between INSN and NEXT and use them to forget old reloads. */
3720 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3721 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3722 note_stores (PATTERN (x), forget_old_reloads_1);
3723
3724#ifdef AUTO_INC_DEC
3725 /* Likewise for regs altered by auto-increment in this insn.
3726 But note that the reg-notes are not changed by reloading:
3727 they still contain the pseudo-regs, not the spill regs. */
3728 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3729 if (REG_NOTE_KIND (x) == REG_INC)
3730 {
3731 /* See if this pseudo reg was reloaded in this insn.
3732 If so, its last-reload info is still valid
3733 because it is based on this insn's reload. */
3734 for (i = 0; i < n_reloads; i++)
3735 if (reload_out[i] == XEXP (x, 0))
3736 break;
3737
3738 if (i == n_reloads)
3739 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3740 }
3741#endif
3742 }
3743 /* A reload reg's contents are unknown after a label. */
3744 if (GET_CODE (insn) == CODE_LABEL)
3745 for (i = 0; i < n_spills; i++)
3746 {
3747 reg_reloaded_contents[i] = -1;
3748 reg_reloaded_insn[i] = 0;
3749 }
3750
3751 /* Don't assume a reload reg is still good after a call insn
3752 if it is a call-used reg. */
3753 else if (GET_CODE (insn) == CALL_INSN)
3754 for (i = 0; i < n_spills; i++)
3755 if (call_used_regs[spill_regs[i]])
3756 {
3757 reg_reloaded_contents[i] = -1;
3758 reg_reloaded_insn[i] = 0;
3759 }
3760
3761 /* In case registers overlap, allow certain insns to invalidate
3762 particular hard registers. */
3763
3764#ifdef INSN_CLOBBERS_REGNO_P
3765 for (i = 0 ; i < n_spills ; i++)
3766 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3767 {
3768 reg_reloaded_contents[i] = -1;
3769 reg_reloaded_insn[i] = 0;
3770 }
3771#endif
3772
3773 insn = next;
3774
3775#ifdef USE_C_ALLOCA
3776 alloca (0);
3777#endif
3778 }
3779}
3780
3781/* Discard all record of any value reloaded from X,
3782 or reloaded in X from someplace else;
3783 unless X is an output reload reg of the current insn.
3784
3785 X may be a hard reg (the reload reg)
3786 or it may be a pseudo reg that was reloaded from. */
3787
3788static void
3789forget_old_reloads_1 (x, ignored)
3790 rtx x;
3791 rtx ignored;
3792{
3793 register int regno;
3794 int nr;
3795 int offset = 0;
3796
3797 /* note_stores does give us subregs of hard regs. */
3798 while (GET_CODE (x) == SUBREG)
3799 {
3800 offset += SUBREG_WORD (x);
3801 x = SUBREG_REG (x);
3802 }
3803
3804 if (GET_CODE (x) != REG)
3805 return;
3806
3807 regno = REGNO (x) + offset;
3808
3809 if (regno >= FIRST_PSEUDO_REGISTER)
3810 nr = 1;
3811 else
3812 {
3813 int i;
3814 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3815 /* Storing into a spilled-reg invalidates its contents.
3816 This can happen if a block-local pseudo is allocated to that reg
3817 and it wasn't spilled because this block's total need is 0.
3818 Then some insn might have an optional reload and use this reg. */
3819 for (i = 0; i < nr; i++)
3820 if (spill_reg_order[regno + i] >= 0
3821 /* But don't do this if the reg actually serves as an output
3822 reload reg in the current instruction. */
3823 && (n_reloads == 0
3824 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3825 {
3826 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3827 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3828 }
3829 }
3830
3831 /* Since value of X has changed,
3832 forget any value previously copied from it. */
3833
3834 while (nr-- > 0)
3835 /* But don't forget a copy if this is the output reload
3836 that establishes the copy's validity. */
3837 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3838 reg_last_reload_reg[regno + nr] = 0;
3839}
3840\f
3841/* For each reload, the mode of the reload register. */
3842static enum machine_mode reload_mode[MAX_RELOADS];
3843
3844/* For each reload, the largest number of registers it will require. */
3845static int reload_nregs[MAX_RELOADS];
3846
3847/* Comparison function for qsort to decide which of two reloads
3848 should be handled first. *P1 and *P2 are the reload numbers. */
3849
3850static int
3851reload_reg_class_lower (p1, p2)
3852 short *p1, *p2;
3853{
3854 register int r1 = *p1, r2 = *p2;
3855 register int t;
3856
3857 /* Consider required reloads before optional ones. */
3858 t = reload_optional[r1] - reload_optional[r2];
3859 if (t != 0)
3860 return t;
3861
3862 /* Count all solitary classes before non-solitary ones. */
3863 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3864 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3865 if (t != 0)
3866 return t;
3867
3868 /* Aside from solitaires, consider all multi-reg groups first. */
3869 t = reload_nregs[r2] - reload_nregs[r1];
3870 if (t != 0)
3871 return t;
3872
3873 /* Consider reloads in order of increasing reg-class number. */
3874 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3875 if (t != 0)
3876 return t;
3877
3878 /* If reloads are equally urgent, sort by reload number,
3879 so that the results of qsort leave nothing to chance. */
3880 return r1 - r2;
3881}
3882\f
3883/* The following HARD_REG_SETs indicate when each hard register is
3884 used for a reload of various parts of the current insn. */
3885
3886/* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3887static HARD_REG_SET reload_reg_used;
3888/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
3889static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
3890/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
3891static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
3892/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
3893static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
3894/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
3895static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
3896/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3897static HARD_REG_SET reload_reg_used_in_op_addr;
3898/* If reg is in use for a RELOAD_FOR_INSN reload. */
3899static HARD_REG_SET reload_reg_used_in_insn;
3900/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
3901static HARD_REG_SET reload_reg_used_in_other_addr;
3902
3903/* If reg is in use as a reload reg for any sort of reload. */
3904static HARD_REG_SET reload_reg_used_at_all;
3905
 3906/* If reg is in use as an inherited reload.  We just mark the first register
3907 in the group. */
3908static HARD_REG_SET reload_reg_used_for_inherit;
3909
3910/* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
3911 TYPE. MODE is used to indicate how many consecutive regs are
3912 actually used. */
3913
3914static void
3915mark_reload_reg_in_use (regno, opnum, type, mode)
3916 int regno;
3917 int opnum;
3918 enum reload_type type;
3919 enum machine_mode mode;
3920{
3921 int nregs = HARD_REGNO_NREGS (regno, mode);
3922 int i;
3923
3924 for (i = regno; i < nregs + regno; i++)
3925 {
3926 switch (type)
3927 {
3928 case RELOAD_OTHER:
3929 SET_HARD_REG_BIT (reload_reg_used, i);
3930 break;
3931
3932 case RELOAD_FOR_INPUT_ADDRESS:
3933 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
3934 break;
3935
3936 case RELOAD_FOR_OUTPUT_ADDRESS:
3937 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
3938 break;
3939
3940 case RELOAD_FOR_OPERAND_ADDRESS:
3941 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3942 break;
3943
3944 case RELOAD_FOR_OTHER_ADDRESS:
3945 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
3946 break;
3947
3948 case RELOAD_FOR_INPUT:
3949 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
3950 break;
3951
3952 case RELOAD_FOR_OUTPUT:
3953 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
3954 break;
3955
3956 case RELOAD_FOR_INSN:
3957 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
3958 break;
3959 }
3960
3961 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3962 }
3963}
3964
3965/* Similarly, but show REGNO is no longer in use for a reload. */
3966
3967static void
3968clear_reload_reg_in_use (regno, opnum, type, mode)
3969 int regno;
3970 int opnum;
3971 enum reload_type type;
3972 enum machine_mode mode;
3973{
3974 int nregs = HARD_REGNO_NREGS (regno, mode);
3975 int i;
3976
3977 for (i = regno; i < nregs + regno; i++)
3978 {
3979 switch (type)
3980 {
3981 case RELOAD_OTHER:
3982 CLEAR_HARD_REG_BIT (reload_reg_used, i);
3983 break;
3984
3985 case RELOAD_FOR_INPUT_ADDRESS:
3986 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
3987 break;
3988
3989 case RELOAD_FOR_OUTPUT_ADDRESS:
3990 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
3991 break;
3992
3993 case RELOAD_FOR_OPERAND_ADDRESS:
3994 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3995 break;
3996
3997 case RELOAD_FOR_OTHER_ADDRESS:
3998 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
3999 break;
4000
4001 case RELOAD_FOR_INPUT:
4002 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4003 break;
4004
4005 case RELOAD_FOR_OUTPUT:
4006 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4007 break;
4008
4009 case RELOAD_FOR_INSN:
4010 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4011 break;
4012 }
4013 }
4014}
4015
4016/* 1 if reg REGNO is free as a reload reg for a reload of the sort
4017 specified by OPNUM and TYPE. */
4018
4019static int
4020reload_reg_free_p (regno, opnum, type)
4021 int regno;
4022 int opnum;
4023 enum reload_type type;
4024{
4025 int i;
4026
4027 /* In use for a RELOAD_OTHER means it's not available for anything except
4028 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4029 to be used only for inputs. */
4030
4031 if (type != RELOAD_FOR_OTHER_ADDRESS
4032 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4033 return 0;
4034
4035 switch (type)
4036 {
4037 case RELOAD_OTHER:
4038 /* In use for anything means not available for a RELOAD_OTHER. */
4039 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4040
4041 /* The other kinds of use can sometimes share a register. */
4042 case RELOAD_FOR_INPUT:
4043 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4044 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4045 return 0;
4046
4047 /* If it is used for some other input, can't use it. */
4048 for (i = 0; i < reload_n_operands; i++)
4049 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4050 return 0;
4051
4052 /* If it is used in a later operand's address, can't use it. */
4053 for (i = opnum + 1; i < reload_n_operands; i++)
4054 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4055 return 0;
4056
4057 return 1;
4058
4059 case RELOAD_FOR_INPUT_ADDRESS:
4060 /* Can't use a register if it is used for an input address for this
4061 operand or used as an input in an earlier one. */
4062 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4063 return 0;
4064
4065 for (i = 0; i < opnum; i++)
4066 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4067 return 0;
4068
4069 return 1;
4070
4071 case RELOAD_FOR_OUTPUT_ADDRESS:
4072 /* Can't use a register if it is used for an output address for this
4073 operand or used as an output in this or a later operand. */
4074 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4075 return 0;
4076
4077 for (i = opnum; i < reload_n_operands; i++)
4078 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4079 return 0;
4080
4081 return 1;
4082
4083 case RELOAD_FOR_OPERAND_ADDRESS:
4084 for (i = 0; i < reload_n_operands; i++)
4085 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4086 return 0;
4087
4088 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4089 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4090
4091 case RELOAD_FOR_OUTPUT:
4092 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4093 outputs, or an operand address for this or an earlier output. */
4094 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4095 return 0;
4096
4097 for (i = 0; i < reload_n_operands; i++)
4098 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4099 return 0;
4100
4101 for (i = 0; i <= opnum; i++)
4102 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4103 return 0;
4104
4105 return 1;
4106
4107 case RELOAD_FOR_INSN:
4108 for (i = 0; i < reload_n_operands; i++)
4109 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4110 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4111 return 0;
4112
4113 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4114 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4115
4116 case RELOAD_FOR_OTHER_ADDRESS:
4117 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4118 }
4119 abort ();
4120}
4121
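/* The case analyses in reload_reg_free_before_p and reload_reg_reaches_end_p
   below reflect an assumed order in which the reloads of one insn take
   effect: RELOAD_FOR_OTHER_ADDRESS reloads first, then RELOAD_OTHER, then
   each operand's RELOAD_FOR_INPUT_ADDRESS followed by its RELOAD_FOR_INPUT,
   then RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN, and finally each
   operand's RELOAD_FOR_OUTPUT_ADDRESS followed by its RELOAD_FOR_OUTPUT.  */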
4122/* Return 1 if the value in reload reg REGNO, as used by a reload
4123 needed for the part of the insn specified by OPNUM and TYPE,
4124 is not in use for a reload in any prior part of the insn.
4125
4126 We can assume that the reload reg was already tested for availability
4127 at the time it is needed, and we should not check this again,
4128 in case the reg has already been marked in use. */
4129
4130static int
4131reload_reg_free_before_p (regno, opnum, type)
4132 int regno;
4133 int opnum;
4134 enum reload_type type;
4135{
4136 int i;
4137
4138 switch (type)
4139 {
4140 case RELOAD_FOR_OTHER_ADDRESS:
4141 /* These always come first. */
4142 return 1;
4143
4144 case RELOAD_OTHER:
4145 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4146
4147 /* If this use is for part of the insn,
4148 check the reg is not in use for any prior part. It is tempting
 4149	 to try to do this by falling through from objects that occur
4150 later in the insn to ones that occur earlier, but that will not
4151 correctly take into account the fact that here we MUST ignore
4152 things that would prevent the register from being allocated in
4153 the first place, since we know that it was allocated. */
4154
4155 case RELOAD_FOR_OUTPUT_ADDRESS:
4156 /* Earlier reloads are for earlier outputs or their addresses,
4157 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4158 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
 4159	 RELOAD_OTHER).  */
4160 for (i = 0; i < opnum; i++)
4161 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4162 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4163 return 0;
4164
4165 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4166 return 0;
4167
4168 for (i = 0; i < reload_n_operands; i++)
4169 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4170 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4171 return 0;
4172
4173 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4174 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4175 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4176
4177 case RELOAD_FOR_OUTPUT:
4178 /* This can't be used in the output address for this operand and
4179 anything that can't be used for it, except that we've already
4180 tested for RELOAD_FOR_INSN objects. */
4181
4182 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4183 return 0;
4184
4185 for (i = 0; i < opnum; i++)
4186 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4187 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4188 return 0;
4189
4190 for (i = 0; i < reload_n_operands; i++)
4191 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4192 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4193 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4194 return 0;
4195
4196 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4197
4198 case RELOAD_FOR_OPERAND_ADDRESS:
4199 case RELOAD_FOR_INSN:
4200 /* These can't conflict with inputs, or each other, so all we have to
4201 test is input addresses and the addresses of OTHER items. */
4202
4203 for (i = 0; i < reload_n_operands; i++)
4204 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4205 return 0;
4206
4207 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4208
4209 case RELOAD_FOR_INPUT:
4210 /* The only things earlier are the address for this and
4211 earlier inputs, other inputs (which we know we don't conflict
4212 with), and addresses of RELOAD_OTHER objects. */
4213
4214 for (i = 0; i <= opnum; i++)
4215 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4216 return 0;
4217
4218 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4219
4220 case RELOAD_FOR_INPUT_ADDRESS:
4221 /* Similarly, all we have to check is for use in earlier inputs'
4222 addresses. */
4223 for (i = 0; i < opnum; i++)
4224 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4225 return 0;
4226
4227 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4228 }
4229 abort ();
4230}
4231
4232/* Return 1 if the value in reload reg REGNO, as used by a reload
4233 needed for the part of the insn specified by OPNUM and TYPE,
4234 is still available in REGNO at the end of the insn.
4235
4236 We can assume that the reload reg was already tested for availability
4237 at the time it is needed, and we should not check this again,
4238 in case the reg has already been marked in use. */
4239
4240static int
4241reload_reg_reaches_end_p (regno, opnum, type)
4242 int regno;
4243 int opnum;
4244 enum reload_type type;
4245{
4246 int i;
4247
4248 switch (type)
4249 {
4250 case RELOAD_OTHER:
4251 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4252 its value must reach the end. */
4253 return 1;
4254
4255 /* If this use is for part of the insn,
4256 its value reaches if no subsequent part uses the same register.
4257 Just like the above function, don't try to do this with lots
4258 of fallthroughs. */
4259
4260 case RELOAD_FOR_OTHER_ADDRESS:
4261 /* Here we check for everything else, since these don't conflict
4262 with anything else and everything comes later. */
4263
4264 for (i = 0; i < reload_n_operands; i++)
4265 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4266 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4267 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4268 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4269 return 0;
4270
4271 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4272 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4273 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4274
4275 case RELOAD_FOR_INPUT_ADDRESS:
4276 /* Similar, except that we check only for this and subsequent inputs
4277 and the address of only subsequent inputs and we do not need
4278 to check for RELOAD_OTHER objects since they are known not to
4279 conflict. */
4280
4281 for (i = opnum; i < reload_n_operands; i++)
4282 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4283 return 0;
4284
4285 for (i = opnum + 1; i < reload_n_operands; i++)
4286 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4287 return 0;
4288
4289 for (i = 0; i < reload_n_operands; i++)
4290 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4291 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4292 return 0;
4293
4294 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4295 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4296
4297 case RELOAD_FOR_INPUT:
4298 /* Similar to input address, except we start at the next operand for
4299 both input and input address and we do not check for
4300 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4301 would conflict. */
4302
4303 for (i = opnum + 1; i < reload_n_operands; i++)
4304 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4305 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4306 return 0;
4307
4308 /* ... fall through ... */
4309
4310 case RELOAD_FOR_OPERAND_ADDRESS:
4311 /* Check outputs and their addresses. */
4312
4313 for (i = 0; i < reload_n_operands; i++)
4314 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4315 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4316 return 0;
4317
4318 return 1;
4319
4320 case RELOAD_FOR_INSN:
 4321	 /* These conflict with other outputs and with RELOAD_OTHER.  So
4322 we need only check for output addresses. */
4323
4324 opnum = -1;
4325
4326 /* ... fall through ... */
4327
4328 case RELOAD_FOR_OUTPUT:
4329 case RELOAD_FOR_OUTPUT_ADDRESS:
4330 /* We already know these can't conflict with a later output. So the
 4331	 only things to check are later output addresses.  */
4332 for (i = opnum + 1; i < reload_n_operands; i++)
4333 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4334 return 0;
4335
4336 return 1;
4337 }
4338
4339 abort ();
4340}
4341\f
4342/* Vector of reload-numbers showing the order in which the reloads should
4343 be processed. */
4344short reload_order[MAX_RELOADS];
4345
4346/* Indexed by reload number, 1 if incoming value
4347 inherited from previous insns. */
4348char reload_inherited[MAX_RELOADS];
4349
4350/* For an inherited reload, this is the insn the reload was inherited from,
4351 if we know it. Otherwise, this is 0. */
4352rtx reload_inheritance_insn[MAX_RELOADS];
4353
4354/* If non-zero, this is a place to get the value of the reload,
4355 rather than using reload_in. */
4356rtx reload_override_in[MAX_RELOADS];
4357
4358/* For each reload, the index in spill_regs of the spill register used,
4359 or -1 if we did not need one of the spill registers for this reload. */
4360int reload_spill_index[MAX_RELOADS];
4361
4362/* Index of last register assigned as a spill register. We allocate in
 4363	 a round-robin fashion.  */
4364
4365static int last_spill_reg = 0;
4366
4367/* Find a spill register to use as a reload register for reload R.
4368 LAST_RELOAD is non-zero if this is the last reload for the insn being
4369 processed.
4370
4371 Set reload_reg_rtx[R] to the register allocated.
4372
4373 If NOERROR is nonzero, we return 1 if successful,
4374 or 0 if we couldn't find a spill reg and we didn't change anything. */
4375
4376static int
4377allocate_reload_reg (r, insn, last_reload, noerror)
4378 int r;
4379 rtx insn;
4380 int last_reload;
4381 int noerror;
4382{
4383 int i;
4384 int pass;
4385 int count;
4386 rtx new;
4387 int regno;
4388
4389 /* If we put this reload ahead, thinking it is a group,
4390 then insist on finding a group. Otherwise we can grab a
4391 reg that some other reload needs.
4392 (That can happen when we have a 68000 DATA_OR_FP_REG
4393 which is a group of data regs or one fp reg.)
4394 We need not be so restrictive if there are no more reloads
4395 for this insn.
4396
4397 ??? Really it would be nicer to have smarter handling
4398 for that kind of reg class, where a problem like this is normal.
4399 Perhaps those classes should be avoided for reloading
4400 by use of more alternatives. */
4401
4402 int force_group = reload_nregs[r] > 1 && ! last_reload;
4403
4404 /* If we want a single register and haven't yet found one,
4405 take any reg in the right class and not in use.
4406 If we want a consecutive group, here is where we look for it.
4407
4408 We use two passes so we can first look for reload regs to
4409 reuse, which are already in use for other reloads in this insn,
4410 and only then use additional registers.
4411 I think that maximizing reuse is needed to make sure we don't
4412 run out of reload regs. Suppose we have three reloads, and
4413 reloads A and B can share regs. These need two regs.
4414 Suppose A and B are given different regs.
4415 That leaves none for C. */
4416 for (pass = 0; pass < 2; pass++)
4417 {
4418 /* I is the index in spill_regs.
4419 We advance it round-robin between insns to use all spill regs
4420 equally, so that inherited reloads have a chance
4421 of leapfrogging each other. */
4422
4423 for (count = 0, i = last_spill_reg; count < n_spills; count++)
4424 {
4425 int class = (int) reload_reg_class[r];
4426
4427 i = (i + 1) % n_spills;
4428
4429 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4430 reload_when_needed[r])
4431 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4432 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4433 /* Look first for regs to share, then for unshared. But
4434 don't share regs used for inherited reloads; they are
4435 the ones we want to preserve. */
4436 && (pass
4437 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4438 spill_regs[i])
4439 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4440 spill_regs[i]))))
4441 {
4442 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4443 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4444 (on 68000) got us two FP regs. If NR is 1,
4445 we would reject both of them. */
4446 if (force_group)
4447 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4448 /* If we need only one reg, we have already won. */
4449 if (nr == 1)
4450 {
4451 /* But reject a single reg if we demand a group. */
4452 if (force_group)
4453 continue;
4454 break;
4455 }
4456 /* Otherwise check that as many consecutive regs as we need
4457 are available here.
4458 Also, don't use for a group registers that are
4459 needed for nongroups. */
4460 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4461 while (nr > 1)
4462 {
4463 regno = spill_regs[i] + nr - 1;
4464 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4465 && spill_reg_order[regno] >= 0
4466 && reload_reg_free_p (regno, reload_opnum[r],
4467 reload_when_needed[r])
4468 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4469 regno)))
4470 break;
4471 nr--;
4472 }
4473 if (nr == 1)
4474 break;
4475 }
4476 }
4477
4478 /* If we found something on pass 1, omit pass 2. */
4479 if (count < n_spills)
4480 break;
4481 }
4482
4483 /* We should have found a spill register by now. */
4484 if (count == n_spills)
4485 {
4486 if (noerror)
4487 return 0;
4488 goto failure;
4489 }
4490
4491 /* I is the index in SPILL_REG_RTX of the reload register we are to
4492 allocate. Get an rtx for it and find its register number. */
4493
4494 new = spill_reg_rtx[i];
4495
4496 if (new == 0 || GET_MODE (new) != reload_mode[r])
4497 spill_reg_rtx[i] = new
4498 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4499
4500 regno = true_regnum (new);
4501
4502 /* Detect when the reload reg can't hold the reload mode.
4503 This used to be one `if', but Sequent compiler can't handle that. */
4504 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4505 {
4506 enum machine_mode test_mode = VOIDmode;
4507 if (reload_in[r])
4508 test_mode = GET_MODE (reload_in[r]);
4509 /* If reload_in[r] has VOIDmode, it means we will load it
4510 in whatever mode the reload reg has: to wit, reload_mode[r].
4511 We have already tested that for validity. */
4512 /* Aside from that, we need to test that the expressions
4513 to reload from or into have modes which are valid for this
4514 reload register. Otherwise the reload insns would be invalid. */
4515 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4516 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4517 if (! (reload_out[r] != 0
4518 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4519 {
4520 /* The reg is OK. */
4521 last_spill_reg = i;
4522
4523 /* Mark as in use for this insn the reload regs we use
4524 for this. */
4525 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4526 reload_when_needed[r], reload_mode[r]);
4527
4528 reload_reg_rtx[r] = new;
4529 reload_spill_index[r] = i;
4530 return 1;
4531 }
4532 }
4533
4534 /* The reg is not OK. */
4535 if (noerror)
4536 return 0;
4537
4538 failure:
4539 if (asm_noperands (PATTERN (insn)) < 0)
4540 /* It's the compiler's fault. */
4541 abort ();
4542
4543 /* It's the user's fault; the operand's mode and constraint
4544 don't match. Disable this reload so we don't crash in final. */
4545 error_for_asm (insn,
4546 "`asm' operand constraint incompatible with operand size");
4547 reload_in[r] = 0;
4548 reload_out[r] = 0;
4549 reload_reg_rtx[r] = 0;
4550 reload_optional[r] = 1;
4551 reload_secondary_p[r] = 1;
4552
4553 return 1;
4554}
4555\f
4556/* Assign hard reg targets for the pseudo-registers we must reload
4557 into hard regs for this insn.
4558 Also output the instructions to copy them in and out of the hard regs.
4559
4560 For machines with register classes, we are responsible for
4561 finding a reload reg in the proper class. */
4562
4563static void
4564choose_reload_regs (insn, avoid_return_reg)
4565 rtx insn;
4566 rtx avoid_return_reg;
4567{
4568 register int i, j;
4569 int max_group_size = 1;
4570 enum reg_class group_class = NO_REGS;
4571 int inheritance;
4572
4573 rtx save_reload_reg_rtx[MAX_RELOADS];
4574 char save_reload_inherited[MAX_RELOADS];
4575 rtx save_reload_inheritance_insn[MAX_RELOADS];
4576 rtx save_reload_override_in[MAX_RELOADS];
4577 int save_reload_spill_index[MAX_RELOADS];
4578 HARD_REG_SET save_reload_reg_used;
4579 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4580 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4581 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4582 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4583 HARD_REG_SET save_reload_reg_used_in_op_addr;
4584 HARD_REG_SET save_reload_reg_used_in_insn;
4585 HARD_REG_SET save_reload_reg_used_in_other_addr;
4586 HARD_REG_SET save_reload_reg_used_at_all;
4587
4588 bzero (reload_inherited, MAX_RELOADS);
4589 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4590 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4591
4592 CLEAR_HARD_REG_SET (reload_reg_used);
4593 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4594 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4595 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4596 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4597
4598 for (i = 0; i < reload_n_operands; i++)
4599 {
4600 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4601 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4602 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4603 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4604 }
4605
4606#ifdef SMALL_REGISTER_CLASSES
4607 /* Don't bother with avoiding the return reg
4608 if we have no mandatory reload that could use it. */
4609 if (avoid_return_reg)
4610 {
4611 int do_avoid = 0;
4612 int regno = REGNO (avoid_return_reg);
4613 int nregs
4614 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4615 int r;
4616
4617 for (r = regno; r < regno + nregs; r++)
4618 if (spill_reg_order[r] >= 0)
4619 for (j = 0; j < n_reloads; j++)
4620 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4621 && (reload_in[j] != 0 || reload_out[j] != 0
4622 || reload_secondary_p[j])
4623 &&
4624 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4625 do_avoid = 1;
4626 if (!do_avoid)
4627 avoid_return_reg = 0;
4628 }
4629#endif /* SMALL_REGISTER_CLASSES */
4630
4631#if 0 /* Not needed, now that we can always retry without inheritance. */
4632 /* See if we have more mandatory reloads than spill regs.
4633 If so, then we cannot risk optimizations that could prevent
4634 reloads from sharing one spill register.
4635
4636 Since we will try finding a better register than reload_reg_rtx
4637 unless it is equal to reload_in or reload_out, count such reloads. */
4638
4639 {
4640 int tem = 0;
4641#ifdef SMALL_REGISTER_CLASSES
4642 int tem = (avoid_return_reg != 0);
4643#endif
4644 for (j = 0; j < n_reloads; j++)
4645 if (! reload_optional[j]
4646 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4647 && (reload_reg_rtx[j] == 0
4648 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4649 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4650 tem++;
4651 if (tem > n_spills)
4652 must_reuse = 1;
4653 }
4654#endif
4655
4656#ifdef SMALL_REGISTER_CLASSES
4657 /* Don't use the subroutine call return reg for a reload
4658 if we are supposed to avoid it. */
4659 if (avoid_return_reg)
4660 {
4661 int regno = REGNO (avoid_return_reg);
4662 int nregs
4663 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4664 int r;
4665
4666 for (r = regno; r < regno + nregs; r++)
4667 if (spill_reg_order[r] >= 0)
4668 SET_HARD_REG_BIT (reload_reg_used, r);
4669 }
4670#endif /* SMALL_REGISTER_CLASSES */
4671
4672 /* In order to be certain of getting the registers we need,
4673 we must sort the reloads into order of increasing register class.
4674 Then our grabbing of reload registers will parallel the process
4675 that provided the reload registers.
4676
4677 Also note whether any of the reloads wants a consecutive group of regs.
4678 If so, record the maximum size of the group desired and what
4679 register class contains all the groups needed by this insn. */
4680
4681 for (j = 0; j < n_reloads; j++)
4682 {
4683 reload_order[j] = j;
4684 reload_spill_index[j] = -1;
4685
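      /* The reload register must be wide enough for both the value reloaded
	 in and the value reloaded out, so use the larger of the two modes.  */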
4686 reload_mode[j]
4687 = (reload_inmode[j] == VOIDmode
4688 || (GET_MODE_SIZE (reload_outmode[j])
4689 > GET_MODE_SIZE (reload_inmode[j])))
4690 ? reload_outmode[j] : reload_inmode[j];
4691
4692 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4693
4694 if (reload_nregs[j] > 1)
4695 {
4696 max_group_size = MAX (reload_nregs[j], max_group_size);
4697 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4698 }
4699
4700 /* If we have already decided to use a certain register,
4701 don't use it in another way. */
4702 if (reload_reg_rtx[j])
4703 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
4704 reload_when_needed[j], reload_mode[j]);
4705 }
4706
4707 if (n_reloads > 1)
4708 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4709
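  /* Save the reload-register choices and usage sets made so far, so that the
     allocation below can be undone and retried from this snapshot (for
     instance, without inheritance) if we run out of spill registers.  */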
4710 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4711 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4712 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4713 sizeof reload_inheritance_insn);
4714 bcopy (reload_override_in, save_reload_override_in,
4715 sizeof reload_override_in);
4716 bcopy (reload_spill_index, save_reload_spill_index,
4717 sizeof reload_spill_index);
4718 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4719 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4720 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4721 reload_reg_used_in_op_addr);
4722 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4723 reload_reg_used_in_insn);
4724 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4725 reload_reg_used_in_other_addr);
4726
4727 for (i = 0; i < reload_n_operands; i++)
4728 {
4729 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4730 reload_reg_used_in_output[i]);
4731 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4732 reload_reg_used_in_input[i]);
4733 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4734 reload_reg_used_in_input_addr[i]);
4735 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4736 reload_reg_used_in_output_addr[i]);
4737 }
4738
4739 /* If -O, try first with inheritance, then turning it off.
4740 If not -O, don't do inheritance.
4741 Using inheritance when not optimizing leads to paradoxes
4742 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4743 because one side of the comparison might be inherited. */
4744
4745 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
4746 {
4747 /* Process the reloads in order of preference just found.
4748 Beyond this point, subregs can be found in reload_reg_rtx.
4749
4750 This used to look for an existing reloaded home for all
4751 of the reloads, and only then perform any new reloads.
4752 But that could lose if the reloads were done out of reg-class order
4753 because a later reload with a looser constraint might have an old
4754 home in a register needed by an earlier reload with a tighter constraint.
4755
4756 To solve this, we make two passes over the reloads, in the order
4757 described above. In the first pass we try to inherit a reload
4758 from a previous insn. If there is a later reload that needs a
4759 class that is a proper subset of the class being processed, we must
4760 also allocate a spill register during the first pass.
4761
4762 Then make a second pass over the reloads to allocate any reloads
4763 that haven't been given registers yet. */
4764
4765 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
4766
4767 for (j = 0; j < n_reloads; j++)
4768 {
4769 register int r = reload_order[j];
4770
4771 /* Ignore reloads that got marked inoperative. */
4772 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4773 continue;
4774
 4775	  /* If find_reloads chose to use reload_in or reload_out as a reload
 4776	     register, we don't need to choose one.  Otherwise, try even if it found
4777 one since we might save an insn if we find the value lying around. */
4778 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4779 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4780 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4781 continue;
4782
4783#if 0 /* No longer needed for correct operation.
4784 It might give better code, or might not; worth an experiment? */
4785 /* If this is an optional reload, we can't inherit from earlier insns
4786 until we are sure that any non-optional reloads have been allocated.
4787 The following code takes advantage of the fact that optional reloads
4788 are at the end of reload_order. */
4789 if (reload_optional[r] != 0)
4790 for (i = 0; i < j; i++)
4791 if ((reload_out[reload_order[i]] != 0
4792 || reload_in[reload_order[i]] != 0
4793 || reload_secondary_p[reload_order[i]])
4794 && ! reload_optional[reload_order[i]]
4795 && reload_reg_rtx[reload_order[i]] == 0)
4796 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4797#endif
4798
4799 /* First see if this pseudo is already available as reloaded
4800 for a previous insn. We cannot try to inherit for reloads
4801 that are smaller than the maximum number of registers needed
4802 for groups unless the register we would allocate cannot be used
4803 for the groups.
4804
4805 We could check here to see if this is a secondary reload for
4806 an object that is already in a register of the desired class.
4807 This would avoid the need for the secondary reload register.
4808 But this is complex because we can't easily determine what
4809 objects might want to be loaded via this reload. So let a register
4810 be allocated here. In `emit_reload_insns' we suppress one of the
4811 loads in the case described above. */
4812
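	  /* Concretely: if a previous insn reloaded pseudo 42 into hard reg 2
	     and nothing has clobbered reg 2 since, reg_last_reload_reg[42] is
	     still that (reg:SI 2) and reg_reloaded_contents for that spill reg
	     is still 42, so the tests below can reuse reg 2 rather than emit
	     another load.  (42 and 2 are illustrative numbers only.)  */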
4813 if (inheritance)
4814 {
4815 register int regno = -1;
4816 enum machine_mode mode;
4817
4818 if (reload_in[r] == 0)
4819 ;
4820 else if (GET_CODE (reload_in[r]) == REG)
4821 {
4822 regno = REGNO (reload_in[r]);
4823 mode = GET_MODE (reload_in[r]);
4824 }
4825 else if (GET_CODE (reload_in_reg[r]) == REG)
4826 {
4827 regno = REGNO (reload_in_reg[r]);
4828 mode = GET_MODE (reload_in_reg[r]);
4829 }
4830#if 0
4831 /* This won't work, since REGNO can be a pseudo reg number.
4832 Also, it takes much more hair to keep track of all the things
4833 that can invalidate an inherited reload of part of a pseudoreg. */
4834 else if (GET_CODE (reload_in[r]) == SUBREG
4835 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4836 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4837#endif
4838
4839 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4840 {
4841 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4842
4843 if (reg_reloaded_contents[i] == regno
4844 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4845 >= GET_MODE_SIZE (mode))
4846 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4847 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4848 spill_regs[i])
4849 && (reload_nregs[r] == max_group_size
4850 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4851 spill_regs[i]))
4852 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
4853 reload_when_needed[r])
4854 && reload_reg_free_before_p (spill_regs[i],
4855 reload_opnum[r],
4856 reload_when_needed[r]))
4857 {
4858 /* If a group is needed, verify that all the subsequent
4859 registers still have their values intact. */
4860 int nr
4861 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4862 int k;
4863
4864 for (k = 1; k < nr; k++)
4865 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4866 != regno)
4867 break;
4868
4869 if (k == nr)
4870 {
4871 int i1;
4872
4873 /* We found a register that contains the
4874 value we need. If this register is the
4875 same as an `earlyclobber' operand of the
4876 current insn, just mark it as a place to
4877 reload from since we can't use it as the
4878 reload register itself. */
4879
4880 for (i1 = 0; i1 < n_earlyclobbers; i1++)
4881 if (reg_overlap_mentioned_for_reload_p
4882 (reg_last_reload_reg[regno],
4883 reload_earlyclobbers[i1]))
4884 break;
4885
4886 if (i1 != n_earlyclobbers
4887 /* Don't really use the inherited spill reg
4888 if we need it wider than we've got it. */
4889 || (GET_MODE_SIZE (reload_mode[r])
4890 > GET_MODE_SIZE (mode)))
4891 reload_override_in[r] = reg_last_reload_reg[regno];
4892 else
4893 {
4894 /* We can use this as a reload reg. */
4895 /* Mark the register as in use for this part of
4896 the insn. */
4897 mark_reload_reg_in_use (spill_regs[i],
4898 reload_opnum[r],
4899 reload_when_needed[r],
4900 reload_mode[r]);
4901 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4902 reload_inherited[r] = 1;
4903 reload_inheritance_insn[r]
4904 = reg_reloaded_insn[i];
4905 reload_spill_index[r] = i;
4906 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
4907 spill_regs[i]);
4908 }
4909 }
4910 }
4911 }
4912 }
4913
4914 /* Here's another way to see if the value is already lying around. */
4915 if (inheritance
4916 && reload_in[r] != 0
4917 && ! reload_inherited[r]
4918 && reload_out[r] == 0
4919 && (CONSTANT_P (reload_in[r])
4920 || GET_CODE (reload_in[r]) == PLUS
4921 || GET_CODE (reload_in[r]) == REG
4922 || GET_CODE (reload_in[r]) == MEM)
4923 && (reload_nregs[r] == max_group_size
4924 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4925 {
4926 register rtx equiv
4927 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
4928 -1, NULL_PTR, 0, reload_mode[r]);
4929 int regno;
4930
4931 if (equiv != 0)
4932 {
4933 if (GET_CODE (equiv) == REG)
4934 regno = REGNO (equiv);
4935 else if (GET_CODE (equiv) == SUBREG)
4936 {
4937 regno = REGNO (SUBREG_REG (equiv));
4938 if (regno < FIRST_PSEUDO_REGISTER)
4939 regno += SUBREG_WORD (equiv);
4940 }
4941 else
4942 abort ();
4943 }
4944
4945 /* If we found a spill reg, reject it unless it is free
4946 and of the desired class. */
4947 if (equiv != 0
4948 && ((spill_reg_order[regno] >= 0
4949 && ! reload_reg_free_before_p (regno, reload_opnum[r],
4950 reload_when_needed[r]))
4951 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4952 regno)))
4953 equiv = 0;
4954
4955 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
4956 equiv = 0;
4957
4958 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4959 equiv = 0;
4960
4961 /* We found a register that contains the value we need.
4962 If this register is the same as an `earlyclobber' operand
4963 of the current insn, just mark it as a place to reload from
4964 since we can't use it as the reload register itself. */
4965
4966 if (equiv != 0)
4967 for (i = 0; i < n_earlyclobbers; i++)
4968 if (reg_overlap_mentioned_for_reload_p (equiv,
4969 reload_earlyclobbers[i]))
4970 {
4971 reload_override_in[r] = equiv;
4972 equiv = 0;
4973 break;
4974 }
4975
4976 /* JRV: If the equiv register we have found is explicitly
4977 clobbered in the current insn, mark but don't use, as above. */
4978
4979 if (equiv != 0 && regno_clobbered_p (regno, insn))
4980 {
4981 reload_override_in[r] = equiv;
4982 equiv = 0;
4983 }
4984
4985 /* If we found an equivalent reg, say no code need be generated
4986 to load it, and use it as our reload reg. */
4987 if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
4988 {
4989 reload_reg_rtx[r] = equiv;
4990 reload_inherited[r] = 1;
4991 /* If it is a spill reg,
4992 mark the spill reg as in use for this insn. */
4993 i = spill_reg_order[regno];
4994 if (i >= 0)
4995 {
4996 mark_reload_reg_in_use (regno, reload_opnum[r],
4997 reload_when_needed[r],
4998 reload_mode[r]);
4999 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno);
5000 }
5001 }
5002 }
5003
5004 /* If we found a register to use already, or if this is an optional
5005 reload, we are done. */
5006 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5007 continue;
5008
5009#if 0 /* No longer needed for correct operation. Might or might not
5010 give better code on the average. Want to experiment? */
5011
5012 /* See if there is a later reload that has a class different from our
5013 class that intersects our class or that requires fewer registers
5014 than our reload. If so, we must allocate a register to this
5015 reload now, since that reload might inherit a previous reload
5016 and take the only available register in our class. Don't do this
5017 for optional reloads since they will force all previous reloads
5018 to be allocated. Also don't do this for reloads that have been
5019 turned off. */
5020
5021 for (i = j + 1; i < n_reloads; i++)
5022 {
5023 int s = reload_order[i];
5024
5025 if ((reload_in[s] == 0 && reload_out[s] == 0
5026 && ! reload_secondary_p[s])
5027 || reload_optional[s])
5028 continue;
5029
5030 if ((reload_reg_class[s] != reload_reg_class[r]
5031 && reg_classes_intersect_p (reload_reg_class[r],
5032 reload_reg_class[s]))
5033 || reload_nregs[s] < reload_nregs[r])
5034 break;
5035 }
5036
5037 if (i == n_reloads)
5038 continue;
5039
5040 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5041#endif
5042 }
5043
5044 /* Now allocate reload registers for anything non-optional that
5045 didn't get one yet. */
5046 for (j = 0; j < n_reloads; j++)
5047 {
5048 register int r = reload_order[j];
5049
5050 /* Ignore reloads that got marked inoperative. */
5051 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5052 continue;
5053
5054 /* Skip reloads that already have a register allocated or are
5055 optional. */
5056 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5057 continue;
5058
5059 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5060 break;
5061 }
5062
5063 /* If that loop got all the way, we have won. */
5064 if (j == n_reloads)
5065 break;
5066
5067 fail:
5068 /* Loop around and try without any inheritance. */
5069 /* First undo everything done by the failed attempt
5070 to allocate with inheritance. */
5071 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5072 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5073 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5074 sizeof reload_inheritance_insn);
5075 bcopy (save_reload_override_in, reload_override_in,
5076 sizeof reload_override_in);
5077 bcopy (save_reload_spill_index, reload_spill_index,
5078 sizeof reload_spill_index);
5079 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5080 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5081 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5082 save_reload_reg_used_in_op_addr);
5083 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5084 save_reload_reg_used_in_insn);
5085 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5086 save_reload_reg_used_in_other_addr);
5087
5088 for (i = 0; i < reload_n_operands; i++)
5089 {
5090 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5091 save_reload_reg_used_in_input[i]);
5092 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5093 save_reload_reg_used_in_output[i]);
5094 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5095 save_reload_reg_used_in_input_addr[i]);
5096 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5097 save_reload_reg_used_in_output_addr[i]);
5098 }
5099 }
5100
5101 /* If we thought we could inherit a reload, because it seemed that
5102 nothing else wanted the same reload register earlier in the insn,
5103 verify that assumption, now that all reloads have been assigned. */
5104
5105 for (j = 0; j < n_reloads; j++)
5106 {
5107 register int r = reload_order[j];
5108
5109 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5110 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5111 reload_opnum[r],
5112 reload_when_needed[r]))
5113 reload_inherited[r] = 0;
5114
5115 /* If we found a better place to reload from,
5116 validate it in the same fashion, if it is a reload reg. */
5117 if (reload_override_in[r]
5118 && (GET_CODE (reload_override_in[r]) == REG
5119 || GET_CODE (reload_override_in[r]) == SUBREG))
5120 {
5121 int regno = true_regnum (reload_override_in[r]);
5122 if (spill_reg_order[regno] >= 0
5123 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5124 reload_when_needed[r]))
5125 reload_override_in[r] = 0;
5126 }
5127 }
5128
5129 /* Now that reload_override_in is known valid,
5130 actually override reload_in. */
5131 for (j = 0; j < n_reloads; j++)
5132 if (reload_override_in[j])
5133 reload_in[j] = reload_override_in[j];
5134
5135 /* If this reload won't be done because it has been cancelled or is
5136 optional and not inherited, clear reload_reg_rtx so other
5137 routines (such as subst_reloads) don't get confused. */
5138 for (j = 0; j < n_reloads; j++)
5139 if (reload_reg_rtx[j] != 0
5140 && ((reload_optional[j] && ! reload_inherited[j])
5141 || (reload_in[j] == 0 && reload_out[j] == 0
5142 && ! reload_secondary_p[j])))
5143 {
5144 int regno = true_regnum (reload_reg_rtx[j]);
5145
5146 if (spill_reg_order[regno] >= 0)
5147 clear_reload_reg_in_use (regno, reload_opnum[j],
5148 reload_when_needed[j], reload_mode[j]);
5149 reload_reg_rtx[j] = 0;
5150 }
5151
5152 /* Record which pseudos and which spill regs have output reloads. */
5153 for (j = 0; j < n_reloads; j++)
5154 {
5155 register int r = reload_order[j];
5156
5157 i = reload_spill_index[r];
5158
5159 /* I is nonneg if this reload used one of the spill regs.
5160 If reload_reg_rtx[r] is 0, this is an optional reload
5161 that we opted to ignore. */
5162 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5163 && reload_reg_rtx[r] != 0)
5164 {
5165 register int nregno = REGNO (reload_out[r]);
5166 int nr = 1;
5167
5168 if (nregno < FIRST_PSEUDO_REGISTER)
5169 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5170
5171 while (--nr >= 0)
5172 reg_has_output_reload[nregno + nr] = 1;
5173
5174 if (i >= 0)
5175 {
5176 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5177 while (--nr >= 0)
5178 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5179 }
5180
5181 if (reload_when_needed[r] != RELOAD_OTHER
5182 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5183 && reload_when_needed[r] != RELOAD_FOR_INSN)
5184 abort ();
5185 }
5186 }
5187}
5188\f
5189/* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5190 reloads of the same item for fear that we might not have enough reload
5191 registers. However, normally they will get the same reload register
5192 and hence actually need not be loaded twice.
5193
5194 Here we check for the most common case of this phenomenon: when we have
5195 a number of reloads for the same object, each of which were allocated
5196 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5197 reload, and is not modified in the insn itself. If we find such,
5198 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5199 This will not increase the number of spill registers needed and will
5200 prevent redundant code. */
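
/* An illustrative case (not from any particular machine description): if
   pseudo 99 is reloaded once as an operand and once as part of another
   operand's address, and both reloads were assigned (reg:SI 2), there is no
   point in loading (reg:SI 2) from pseudo 99 twice; the loop below cancels
   one reload and promotes the survivor to RELOAD_OTHER so that it is
   emitted early enough to serve both uses.  */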
5201
5202#ifdef SMALL_REGISTER_CLASSES
5203
5204static void
5205merge_assigned_reloads (insn)
5206 rtx insn;
5207{
5208 int i, j;
5209
5210 /* Scan all the reloads looking for ones that only load values, are
5211 not already RELOAD_OTHER, and whose reload_reg_rtx is assigned
5212 and not modified by INSN. */
5213
5214 for (i = 0; i < n_reloads; i++)
5215 {
5216 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5217 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5218 || reg_set_p (reload_reg_rtx[i], insn))
5219 continue;
5220
5221 /* Look at all other reloads. Ensure that the only use of this
5222 reload_reg_rtx is in a reload that just loads the same value
5223 as we do. Note that any secondary reloads must be of the identical
5224 class since the values, modes, and result registers are the
5225 same, so we need not do anything with any secondary reloads. */
5226
5227 for (j = 0; j < n_reloads; j++)
5228 {
5229 if (i == j || reload_reg_rtx[j] == 0
5230 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5231 reload_reg_rtx[i]))
5232 continue;
5233
5234 /* If the reload regs aren't exactly the same (e.g., different modes)
5235 or if the values are different, we can't merge anything with this
5236 reload register. */
5237
5238 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5239 || reload_out[j] != 0 || reload_in[j] == 0
5240 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5241 break;
5242 }
5243
5244 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5245 we, in fact, found any matching reloads. */
5246
5247 if (j == n_reloads)
5248 {
5249 for (j = 0; j < n_reloads; j++)
5250 if (i != j && reload_reg_rtx[j] != 0
5251 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5252 {
5253 reload_when_needed[i] = RELOAD_OTHER;
5254 reload_in[j] = 0;
5255 transfer_replacements (i, j);
5256 }
5257
5258 /* If this is now RELOAD_OTHER, look for any reloads that load
5259 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5260 if they were for inputs, RELOAD_OTHER for outputs. Note that
5261 this test is equivalent to looking for reloads for this operand
5262 number. */
5263
5264 if (reload_when_needed[i] == RELOAD_OTHER)
5265 for (j = 0; j < n_reloads; j++)
5266 if (reload_in[j] != 0
5267 && reload_when_needed[j] != RELOAD_OTHER
5268 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5269 reload_in[i]))
5270 reload_when_needed[j]
5271 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5272 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5273 }
5274 }
5275}
5276#endif /* SMALL_REGISTER_CLASSES */
5277\f
5278/* Output insns to reload values in and out of the chosen reload regs. */
5279
5280static void
5281emit_reload_insns (insn)
5282 rtx insn;
5283{
5284 register int j;
5285 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5286 rtx other_input_address_reload_insns = 0;
5287 rtx other_input_reload_insns = 0;
5288 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5289 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5290 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5291 rtx operand_reload_insns = 0;
5292 rtx following_insn = NEXT_INSN (insn);
5293 rtx before_insn = insn;
5294 int special;
5295 /* Values to be put in spill_reg_store are put here first. */
5296 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5297
5298 for (j = 0; j < reload_n_operands; j++)
5299 input_reload_insns[j] = input_address_reload_insns[j]
5300 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5301
5302 /* If this is a CALL_INSN preceded by USE insns, any reload insns
5303 must go in front of the first USE insn, not in front of INSN. */
5304
5305 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
5306 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
5307 while (GET_CODE (PREV_INSN (before_insn)) == INSN
5308 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
5309 before_insn = PREV_INSN (before_insn);
5310
5311 /* If INSN is followed by any CLOBBER insns made by find_reloads,
5312 put our reloads after them since they may otherwise be
5313 misinterpreted. */
5314
5315 while (GET_CODE (following_insn) == INSN
5316 && GET_MODE (following_insn) == DImode
5317 && GET_CODE (PATTERN (following_insn)) == CLOBBER
5318 && NEXT_INSN (following_insn) != 0)
5319 following_insn = NEXT_INSN (following_insn);
5320
5321 /* Now output the instructions to copy the data into and out of the
5322 reload registers. Do these in the order that the reloads were reported,
5323 since reloads of base and index registers precede reloads of operands
5324 and the operands may need the base and index registers reloaded. */
5325
5326 for (j = 0; j < n_reloads; j++)
5327 {
5328 register rtx old;
5329 rtx oldequiv_reg = 0;
5330 rtx store_insn = 0;
5331
5332 old = reload_in[j];
5333 if (old != 0 && ! reload_inherited[j]
5334 && ! rtx_equal_p (reload_reg_rtx[j], old)
5335 && reload_reg_rtx[j] != 0)
5336 {
5337 register rtx reloadreg = reload_reg_rtx[j];
5338 rtx oldequiv = 0;
5339 enum machine_mode mode;
5340 rtx *where;
5341
5342 /* Determine the mode to reload in.
5343 This is very tricky because we have three to choose from.
5344 There is the mode the insn operand wants (reload_inmode[J]).
5345 There is the mode of the reload register RELOADREG.
5346 There is the intrinsic mode of the operand, which we could find
5347 by stripping some SUBREGs.
5348 It turns out that RELOADREG's mode is irrelevant:
5349 we can change that arbitrarily.
5350
5351 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5352 then the reload reg may not support QImode moves, so use SImode.
5353 If foo is in memory due to spilling a pseudo reg, this is safe,
5354 because the QImode value is in the least significant part of a
5355 slot big enough for a SImode. If foo is some other sort of
5356 memory reference, then it is impossible to reload this case,
5357 so previous passes had better make sure this never happens.
5358
5359 Then consider a one-word union which has SImode and one of its
5360 members is a float, being fetched as (SUBREG:SF union:SI).
5361 We must fetch that as SFmode because we could be loading into
5362 a float-only register. In this case OLD's mode is correct.
5363
5364 Consider an immediate integer: it has VOIDmode. Here we need
5365 to get a mode from something else.
5366
5367 In some cases, there is a fourth mode, the operand's
5368 containing mode. If the insn specifies a containing mode for
5369 this operand, it overrides all others.
5370
5371 I am not sure whether the algorithm here is always right,
5372 but it does the right things in those cases. */
5373
5374 mode = GET_MODE (old);
5375 if (mode == VOIDmode)
5376 mode = reload_inmode[j];
5377
5378#ifdef SECONDARY_INPUT_RELOAD_CLASS
5379 /* If we need a secondary register for this operation, see if
5380 the value is already in a register in that class. Don't
5381 do this if the secondary register will be used as a scratch
5382 register. */
5383
5384 if (reload_secondary_reload[j] >= 0
5385 && reload_secondary_icode[j] == CODE_FOR_nothing
5386 && optimize)
5387 oldequiv
5388 = find_equiv_reg (old, insn,
5389 reload_reg_class[reload_secondary_reload[j]],
5390 -1, NULL_PTR, 0, mode);
5391#endif
5392
5393 /* If reloading from memory, see if there is a register
5394 that already holds the same value. If so, reload from there.
5395 We can pass 0 as the reload_reg_p argument because
5396 any other reload has either already been emitted,
5397 in which case find_equiv_reg will see the reload-insn,
5398 or has yet to be emitted, in which case it doesn't matter
5399 because we will use this equiv reg right away. */
5400
5401 if (oldequiv == 0 && optimize
5402 && (GET_CODE (old) == MEM
5403 || (GET_CODE (old) == REG
5404 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5405 && reg_renumber[REGNO (old)] < 0)))
5406 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5407 -1, NULL_PTR, 0, mode);
5408
5409 if (oldequiv)
5410 {
5411 int regno = true_regnum (oldequiv);
5412
5413 /* If OLDEQUIV is a spill register, don't use it for this
5414 if any other reload needs it at an earlier stage of this insn
5415 or at this stage. */
5416 if (spill_reg_order[regno] >= 0
5417 && (! reload_reg_free_p (regno, reload_opnum[j],
5418 reload_when_needed[j])
5419 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5420 reload_when_needed[j])))
5421 oldequiv = 0;
5422
5423 /* If OLDEQUIV is not a spill register,
5424 don't use it if any other reload wants it. */
5425 if (spill_reg_order[regno] < 0)
5426 {
5427 int k;
5428 for (k = 0; k < n_reloads; k++)
5429 if (reload_reg_rtx[k] != 0 && k != j
5430 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5431 oldequiv))
5432 {
5433 oldequiv = 0;
5434 break;
5435 }
5436 }
5437
5438 /* If it is no cheaper to copy from OLDEQUIV into the
5439 reload register than it would be to move from memory,
5440 don't use it. Likewise, if we need a secondary register
5441 or memory. */
5442
5443 if (oldequiv != 0
5444 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5445 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5446 reload_reg_class[j])
5447 >= MEMORY_MOVE_COST (mode)))
5448#ifdef SECONDARY_INPUT_RELOAD_CLASS
5449 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5450 mode, oldequiv)
5451 != NO_REGS)
5452#endif
5453#ifdef SECONDARY_MEMORY_NEEDED
5454 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5455 REGNO_REG_CLASS (regno),
5456 mode)
5457#endif
5458 ))
5459 oldequiv = 0;
5460 }
5461
5462 if (oldequiv == 0)
5463 oldequiv = old;
5464 else if (GET_CODE (oldequiv) == REG)
5465 oldequiv_reg = oldequiv;
5466 else if (GET_CODE (oldequiv) == SUBREG)
5467 oldequiv_reg = SUBREG_REG (oldequiv);
5468
5469 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5470 then load RELOADREG from OLDEQUIV. */
5471
5472 if (GET_MODE (reloadreg) != mode)
5473 reloadreg = gen_lowpart_common (mode, reloadreg);
5474 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5475 oldequiv = SUBREG_REG (oldequiv);
5476 if (GET_MODE (oldequiv) != VOIDmode
5477 && mode != GET_MODE (oldequiv))
5478 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5479
5480 /* Switch to the right place to emit the reload insns. */
5481 switch (reload_when_needed[j])
5482 {
5483 case RELOAD_OTHER:
5484 where = &other_input_reload_insns;
5485 break;
5486 case RELOAD_FOR_INPUT:
5487 where = &input_reload_insns[reload_opnum[j]];
5488 break;
5489 case RELOAD_FOR_INPUT_ADDRESS:
5490 where = &input_address_reload_insns[reload_opnum[j]];
5491 break;
5492 case RELOAD_FOR_OUTPUT_ADDRESS:
5493 where = &output_address_reload_insns[reload_opnum[j]];
5494 break;
5495 case RELOAD_FOR_OPERAND_ADDRESS:
5496 where = &operand_reload_insns;
5497 break;
5498 case RELOAD_FOR_OTHER_ADDRESS:
5499 where = &other_input_address_reload_insns;
5500 break;
5501 default:
5502 abort ();
5503 }
5504
5505 push_to_sequence (*where);
5506 special = 0;
5507
5508 /* Auto-increment addresses must be reloaded in a special way. */
5509 if (GET_CODE (oldequiv) == POST_INC
5510 || GET_CODE (oldequiv) == POST_DEC
5511 || GET_CODE (oldequiv) == PRE_INC
5512 || GET_CODE (oldequiv) == PRE_DEC)
5513 {
5514 /* We are not going to bother supporting the case where an
5515 incremented register can't be copied directly from
5516 OLDEQUIV since this seems highly unlikely. */
5517 if (reload_secondary_reload[j] >= 0)
5518 abort ();
5519 /* Prevent normal processing of this reload. */
5520 special = 1;
5521 /* Output a special code sequence for this case. */
5522 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5523 }
5524
5525 /* If we are reloading a pseudo-register that was set by the previous
5526 insn, see if we can get rid of that pseudo-register entirely
5527 by redirecting the previous insn into our reload register. */
5528
5529 else if (optimize && GET_CODE (old) == REG
5530 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5531 && dead_or_set_p (insn, old)
5532 /* This is unsafe if some other reload
5533 uses the same reg first. */
5534 && reload_reg_free_before_p (REGNO (reloadreg),
5535 reload_opnum[j],
5536 reload_when_needed[j]))
5537 {
5538 rtx temp = PREV_INSN (insn);
5539 while (temp && GET_CODE (temp) == NOTE)
5540 temp = PREV_INSN (temp);
5541 if (temp
5542 && GET_CODE (temp) == INSN
5543 && GET_CODE (PATTERN (temp)) == SET
5544 && SET_DEST (PATTERN (temp)) == old
5545 /* Make sure we can access insn_operand_constraint. */
5546 && asm_noperands (PATTERN (temp)) < 0
5547 /* This is unsafe if prev insn rejects our reload reg. */
5548 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5549 reloadreg)
5550 /* This is unsafe if operand occurs more than once in current
5551 insn. Perhaps some occurrences aren't reloaded. */
5552 && count_occurrences (PATTERN (insn), old) == 1
5553 /* Don't risk splitting a matching pair of operands. */
5554 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5555 {
5556 /* Store into the reload register instead of the pseudo. */
5557 SET_DEST (PATTERN (temp)) = reloadreg;
5558 /* If these are the only uses of the pseudo reg,
5559 pretend for GDB it lives in the reload reg we used. */
5560 if (reg_n_deaths[REGNO (old)] == 1
5561 && reg_n_sets[REGNO (old)] == 1)
5562 {
5563 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5564 alter_reg (REGNO (old), -1);
5565 }
5566 special = 1;
5567 }
5568 }
5569
5570 /* We can't do that, so output an insn to load RELOADREG. */
5571
5572 if (! special)
5573 {
5574#ifdef SECONDARY_INPUT_RELOAD_CLASS
5575 rtx second_reload_reg = 0;
5576 enum insn_code icode;
5577
5578 /* If we have a secondary reload, pick up the secondary register
5579 and icode, if any. If OLDEQUIV and OLD are different or
5580 if this is an in-out reload, recompute whether or not we
5581 still need a secondary register and what the icode should
5582 be. If we still need a secondary register and the class or
5583 icode is different, go back to reloading from OLD if using
5584 OLDEQUIV means that we got the wrong type of register. We
5585 cannot have different class or icode due to an in-out reload
5586 because we don't make such reloads when both the input and
5587 output need secondary reload registers. */
5588
5589 if (reload_secondary_reload[j] >= 0)
5590 {
5591 int secondary_reload = reload_secondary_reload[j];
5592 rtx real_oldequiv = oldequiv;
5593 rtx real_old = old;
5594
5595 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5596 and similarly for OLD.
5597 See comments in find_secondary_reload in reload.c. */
5598 if (GET_CODE (oldequiv) == REG
5599 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5600 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5601 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5602
5603 if (GET_CODE (old) == REG
5604 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5605 && reg_equiv_mem[REGNO (old)] != 0)
5606 real_old = reg_equiv_mem[REGNO (old)];
5607
5608 second_reload_reg = reload_reg_rtx[secondary_reload];
5609 icode = reload_secondary_icode[j];
5610
5611 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5612 || (reload_in[j] != 0 && reload_out[j] != 0))
5613 {
5614 enum reg_class new_class
5615 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5616 mode, real_oldequiv);
5617
5618 if (new_class == NO_REGS)
5619 second_reload_reg = 0;
5620 else
5621 {
5622 enum insn_code new_icode;
5623 enum machine_mode new_mode;
5624
5625 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5626 REGNO (second_reload_reg)))
5627 oldequiv = old, real_oldequiv = real_old;
5628 else
5629 {
5630 new_icode = reload_in_optab[(int) mode];
5631 if (new_icode != CODE_FOR_nothing
5632 && ((insn_operand_predicate[(int) new_icode][0]
5633 && ! ((*insn_operand_predicate[(int) new_icode][0])
5634 (reloadreg, mode)))
5635 || (insn_operand_predicate[(int) new_icode][1]
5636 && ! ((*insn_operand_predicate[(int) new_icode][1])
5637 (real_oldequiv, mode)))))
5638 new_icode = CODE_FOR_nothing;
5639
5640 if (new_icode == CODE_FOR_nothing)
5641 new_mode = mode;
5642 else
5643 new_mode = insn_operand_mode[(int) new_icode][2];
5644
5645 if (GET_MODE (second_reload_reg) != new_mode)
5646 {
5647 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5648 new_mode))
5649 oldequiv = old, real_oldequiv = real_old;
5650 else
5651 second_reload_reg
5652 = gen_rtx (REG, new_mode,
5653 REGNO (second_reload_reg));
5654 }
5655 }
5656 }
5657 }
5658
5659 /* If we still need a secondary reload register, check
5660 to see if it is being used as a scratch or intermediate
5661 register and generate code appropriately. If we need
5662 a scratch register, use REAL_OLDEQUIV since the form of
5663 the insn may depend on the actual address if it is
5664 a MEM. */
5665
5666 if (second_reload_reg)
5667 {
5668 if (icode != CODE_FOR_nothing)
5669 {
5670 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5671 second_reload_reg));
5672 special = 1;
5673 }
5674 else
5675 {
5676 /* See if we need a scratch register to load the
5677 intermediate register (a tertiary reload). */
5678 enum insn_code tertiary_icode
5679 = reload_secondary_icode[secondary_reload];
5680
5681 if (tertiary_icode != CODE_FOR_nothing)
5682 {
5683 rtx third_reload_reg
5684 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5685
5686 emit_insn ((GEN_FCN (tertiary_icode)
5687 (second_reload_reg, real_oldequiv,
5688 third_reload_reg)));
5689 }
5690 else
5691 gen_input_reload (second_reload_reg, oldequiv,
5692 reload_opnum[j],
5693 reload_when_needed[j]);
5694
5695 oldequiv = second_reload_reg;
5696 }
5697 }
5698 }
5699#endif
5700
5701 if (! special)
5702 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5703 reload_when_needed[j]);
5704
5705#if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5706 /* We may have to make a REG_DEAD note for the secondary reload
5707 register in the insns we just made. Find the last insn that
5708 mentioned the register. */
5709 if (! special && second_reload_reg
5710 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5711 {
5712 rtx prev;
5713
5714 for (prev = get_last_insn (); prev;
5715 prev = PREV_INSN (prev))
5716 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5717 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5718 PATTERN (prev)))
5719 {
5720 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5721 second_reload_reg,
5722 REG_NOTES (prev));
5723 break;
5724 }
5725 }
5726#endif
5727 }
5728
5729 /* End this sequence. */
5730 *where = get_insns ();
5731 end_sequence ();
5732 }
5733
5734 /* Add a note saying the input reload reg
5735 dies in this insn, if anyone cares. */
5736#ifdef PRESERVE_DEATH_INFO_REGNO_P
5737 if (old != 0
5738 && reload_reg_rtx[j] != old
5739 && reload_reg_rtx[j] != 0
5740 && reload_out[j] == 0
5741 && ! reload_inherited[j]
5742 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5743 {
5744 register rtx reloadreg = reload_reg_rtx[j];
5745
5746#if 0
5747 /* We can't abort here because we need to support this for sched.c.
5748 It's not terrible to miss a REG_DEAD note, but we should try
5749 to figure out how to do this correctly. */
5750 /* The code below is incorrect for address-only reloads. */
5751 if (reload_when_needed[j] != RELOAD_OTHER
5752 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5753 abort ();
5754#endif
5755
5756 /* Add a death note to this insn, for an input reload. */
5757
5758 if ((reload_when_needed[j] == RELOAD_OTHER
5759 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5760 && ! dead_or_set_p (insn, reloadreg))
5761 REG_NOTES (insn)
5762 = gen_rtx (EXPR_LIST, REG_DEAD,
5763 reloadreg, REG_NOTES (insn));
5764 }
5765
5766 /* When we inherit a reload, the last marked death of the reload reg
5767 may no longer really be a death. */
5768 if (reload_reg_rtx[j] != 0
5769 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5770 && reload_inherited[j])
5771 {
5772 /* Handle inheriting an output reload.
5773 Remove the death note from the output reload insn. */
5774 if (reload_spill_index[j] >= 0
5775 && GET_CODE (reload_in[j]) == REG
5776 && spill_reg_store[reload_spill_index[j]] != 0
5777 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5778 REG_DEAD, REGNO (reload_reg_rtx[j])))
5779 remove_death (REGNO (reload_reg_rtx[j]),
5780 spill_reg_store[reload_spill_index[j]]);
5781 /* Likewise for input reloads that were inherited. */
5782 else if (reload_spill_index[j] >= 0
5783 && GET_CODE (reload_in[j]) == REG
5784 && spill_reg_store[reload_spill_index[j]] == 0
5785 && reload_inheritance_insn[j] != 0
5786 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
5787 REGNO (reload_reg_rtx[j])))
5788 remove_death (REGNO (reload_reg_rtx[j]),
5789 reload_inheritance_insn[j]);
5790 else
5791 {
5792 rtx prev;
5793
5794 /* We got this register from find_equiv_reg.
5795 Search back for its last death note and get rid of it.
5796 But don't search back too far.
5797 Don't go past a place where this reg is set,
5798 since a death note before that remains valid. */
5799 for (prev = PREV_INSN (insn);
5800 prev && GET_CODE (prev) != CODE_LABEL;
5801 prev = PREV_INSN (prev))
5802 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5803 && dead_or_set_p (prev, reload_reg_rtx[j]))
5804 {
5805 if (find_regno_note (prev, REG_DEAD,
5806 REGNO (reload_reg_rtx[j])))
5807 remove_death (REGNO (reload_reg_rtx[j]), prev);
5808 break;
5809 }
5810 }
5811 }
5812
5813 /* We might have used find_equiv_reg above to choose an alternate
5814 place from which to reload. If so, and it died, we need to remove
5815 that death and move it to one of the insns we just made. */
5816
5817 if (oldequiv_reg != 0
5818 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5819 {
5820 rtx prev, prev1;
5821
5822 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5823 prev = PREV_INSN (prev))
5824 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5825 && dead_or_set_p (prev, oldequiv_reg))
5826 {
5827 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5828 {
5829 for (prev1 = this_reload_insn;
5830 prev1; prev1 = PREV_INSN (prev1))
5831 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
5832 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5833 PATTERN (prev1)))
5834 {
5835 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5836 oldequiv_reg,
5837 REG_NOTES (prev1));
5838 break;
5839 }
5840 remove_death (REGNO (oldequiv_reg), prev);
5841 }
5842 break;
5843 }
5844 }
5845#endif
5846
5847 /* If we are reloading a register that was recently stored in with an
5848 output-reload, see if we can prove there was
5849 actually no need to store the old value in it. */
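	 /* (For instance: pseudo 42 lives in a stack slot, an earlier reload
	    stored it back to the slot through this same spill reg, this insn
	    reads 42, 42 dies here, and nothing referenced 42 in between.
	    Then the store back to the slot was useless and
	    delete_output_reload removes it.  42 is an illustrative number.)  */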
5850
5851 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5852 && reload_in[j] != 0
5853 && GET_CODE (reload_in[j]) == REG
5854#if 0
5855 /* There doesn't seem to be any reason to restrict this to pseudos
5856 and doing so loses in the case where we are copying from a
5857 register of the wrong class. */
5858 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5859#endif
5860 && spill_reg_store[reload_spill_index[j]] != 0
5861 /* This is unsafe if some other reload uses the same reg first. */
5862 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
5863 reload_opnum[j], reload_when_needed[j])
5864 && dead_or_set_p (insn, reload_in[j])
5865 /* This is unsafe if operand occurs more than once in current
5866 insn. Perhaps some occurrences weren't reloaded. */
5867 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5868 delete_output_reload (insn, j,
5869 spill_reg_store[reload_spill_index[j]]);
5870
5871 /* Input-reloading is done. Now do output-reloading,
5872 storing the value from the reload-register after the main insn
5873 if reload_out[j] is nonzero.
5874
5875 ??? At some point we need to support handling output reloads of
5876 JUMP_INSNs or insns that set cc0. */
5877 old = reload_out[j];
5878 if (old != 0
5879 && reload_reg_rtx[j] != old
5880 && reload_reg_rtx[j] != 0)
5881 {
5882 register rtx reloadreg = reload_reg_rtx[j];
5883 register rtx second_reloadreg = 0;
5884 rtx note, p;
5885 enum machine_mode mode;
5886 int special = 0;
5887
5888 /* An output operand that dies right away does need a reload,
5889 but need not be copied from it. Show the new location in the
5890 REG_UNUSED note. */
5891 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5892 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5893 {
5894 XEXP (note, 0) = reload_reg_rtx[j];
5895 continue;
5896 }
5897 else if (GET_CODE (old) == SCRATCH)
5898 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5899 but we don't want to make an output reload. */
5900 continue;
5901
5902#if 0
5903 /* Strip off of OLD any size-increasing SUBREGs such as
5904 (SUBREG:SI foo:QI 0). */
5905
5906 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5907 && (GET_MODE_SIZE (GET_MODE (old))
5908 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5909 old = SUBREG_REG (old);
5910#endif
5911
5912 /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
5913 if (GET_CODE (insn) == JUMP_INSN)
5914 abort ();
5915
5916 push_to_sequence (output_reload_insns[reload_opnum[j]]);
5917
5918 /* Determine the mode to reload in.
5919 See comments above (for input reloading). */
5920
5921 mode = GET_MODE (old);
5922 if (mode == VOIDmode)
5923 {
5924 /* VOIDmode should never happen for an output. */
5925 if (asm_noperands (PATTERN (insn)) < 0)
5926 /* It's the compiler's fault. */
5927 abort ();
5928 error_for_asm (insn, "output operand is constant in `asm'");
5929 /* Prevent crash--use something we know is valid. */
5930 mode = word_mode;
5931 old = gen_rtx (REG, mode, REGNO (reloadreg));
5932 }
5933
5934 if (GET_MODE (reloadreg) != mode)
5935 reloadreg = gen_lowpart_common (mode, reloadreg);
5936
5937#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5938
5939 /* If we need two reload regs, set RELOADREG to the intermediate
5940 one, since it will be stored into OUT. We might need a secondary
5941 register only for an input reload, so check again here. */
5942
5943 if (reload_secondary_reload[j] >= 0)
5944 {
5945 rtx real_old = old;
5946
5947 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5948 && reg_equiv_mem[REGNO (old)] != 0)
5949 real_old = reg_equiv_mem[REGNO (old)];
5950
5951 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5952 mode, real_old)
5953 != NO_REGS))
5954 {
5955 second_reloadreg = reloadreg;
5956 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
5957
5958 /* See if RELOADREG is to be used as a scratch register
5959 or as an intermediate register. */
5960 if (reload_secondary_icode[j] != CODE_FOR_nothing)
5961 {
5962 emit_insn ((GEN_FCN (reload_secondary_icode[j])
5963 (real_old, second_reloadreg, reloadreg)));
5964 special = 1;
5965 }
5966 else
5967 {
5968 /* See if we need both a scratch and intermediate reload
5969 register. */
5970 int secondary_reload = reload_secondary_reload[j];
5971 enum insn_code tertiary_icode
5972 = reload_secondary_icode[secondary_reload];
5973 rtx pat;
5974
5975 if (GET_MODE (reloadreg) != mode)
5976 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5977
5978 if (tertiary_icode != CODE_FOR_nothing)
5979 {
5980 rtx third_reloadreg
5981 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5982 pat = (GEN_FCN (tertiary_icode)
5983 (reloadreg, second_reloadreg, third_reloadreg));
5984 }
5985#ifdef SECONDARY_MEMORY_NEEDED
5986 /* If we need a memory location to do the move, do it that way. */
5987 else if (GET_CODE (reloadreg) == REG
5988 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
5989 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
5990 REGNO_REG_CLASS (REGNO (second_reloadreg)),
5991 GET_MODE (second_reloadreg)))
5992 {
5993 /* Get the memory to use and rewrite both registers
5994 to its mode. */
5995 rtx loc
5996 = get_secondary_mem (reloadreg,
5997 GET_MODE (second_reloadreg),
5998 reload_opnum[j],
5999 reload_when_needed[j]);
6000 rtx tmp_reloadreg;
6001
6002 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6003 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6004 REGNO (second_reloadreg));
6005
6006 if (GET_MODE (loc) != GET_MODE (reloadreg))
6007 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6008 REGNO (reloadreg));
6009 else
6010 tmp_reloadreg = reloadreg;
6011
6012 emit_move_insn (loc, second_reloadreg);
6013 pat = gen_move_insn (tmp_reloadreg, loc);
6014 }
6015#endif
6016 else
6017 pat = gen_move_insn (reloadreg, second_reloadreg);
6018
6019 emit_insn (pat);
6020 }
6021 }
6022 }
6023#endif
6024
6025 /* Output the last reload insn. */
6026 if (! special)
6027 {
6028#ifdef SECONDARY_MEMORY_NEEDED
6029 /* If we need a memory location to do the move, do it that way. */
6030 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6031 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6032 REGNO_REG_CLASS (REGNO (reloadreg)),
6033 GET_MODE (reloadreg)))
6034 {
6035 /* Get the memory to use and rewrite both registers to
6036 its mode. */
6037 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6038 reload_opnum[j],
6039 reload_when_needed[j]);
6040
6041 if (GET_MODE (loc) != GET_MODE (reloadreg))
6042 reloadreg = gen_rtx (REG, GET_MODE (loc),
6043 REGNO (reloadreg));
6044
6045 if (GET_MODE (loc) != GET_MODE (old))
6046 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6047
6048 emit_insn (gen_move_insn (loc, reloadreg));
6049 emit_insn (gen_move_insn (old, loc));
6050 }
6051 else
6052#endif
6053 emit_insn (gen_move_insn (old, reloadreg));
6054 }
6055
6056#ifdef PRESERVE_DEATH_INFO_REGNO_P
6057 /* If final will look at death notes for this reg,
6058 put one on the last output-reload insn to use it. Similarly
6059 for any secondary register. */
6060 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6061 for (p = get_last_insn (); p; p = PREV_INSN (p))
6062 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6063 && reg_overlap_mentioned_for_reload_p (reloadreg,
6064 PATTERN (p)))
6065 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6066 reloadreg, REG_NOTES (p));
6067
6068#ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6069 if (! special
6070 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6071 for (p = get_last_insn (); p; p = PREV_INSN (p))
6072 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6073 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6074 PATTERN (p)))
6075 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6076 second_reloadreg, REG_NOTES (p));
6077#endif
6078#endif
6079 /* Look at all insns we emitted, just to be safe. */
6080 for (p = get_insns (); p; p = NEXT_INSN (p))
6081 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6082 {
6083 /* If this output reload doesn't come from a spill reg,
6084 clear any memory of reloaded copies of the pseudo reg.
6085 If this output reload comes from a spill reg,
6086 reg_has_output_reload will make this do nothing. */
6087 note_stores (PATTERN (p), forget_old_reloads_1);
6088
6089 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6090 store_insn = p;
6091 }
6092
6093 output_reload_insns[reload_opnum[j]] = get_insns ();
6094 end_sequence ();
6095
6096 }
6097
6098 if (reload_spill_index[j] >= 0)
6099 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6100 }
6101
6102 /* Now write all the insns we made for reloads in the order expected by
6103 the allocation functions. Prior to the insn being reloaded, we write
6104 the following reloads:
6105
6106 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6107
6108 RELOAD_OTHER reloads.
6109
6110 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6111 the RELOAD_FOR_INPUT reload for the operand.
6112
6113 RELOAD_FOR_OPERAND_ADDRESS reloads.
6114
6115 After the insn being reloaded, we write the following:
6116
6117 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6118 the RELOAD_FOR_OUTPUT reload for that operand. */
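   /* Schematically, the resulting stream around INSN is:
	  other-address reloads, RELOAD_OTHER reloads,
	  per-operand input-address then input reloads,
	  operand-address reloads,
	INSN,
	  per-operand output-address then output reloads.  */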
6119
6120 emit_insns_before (other_input_address_reload_insns, before_insn);
6121 emit_insns_before (other_input_reload_insns, before_insn);
6122
6123 for (j = 0; j < reload_n_operands; j++)
6124 {
6125 emit_insns_before (input_address_reload_insns[j], before_insn);
6126 emit_insns_before (input_reload_insns[j], before_insn);
6127 }
6128
6129 emit_insns_before (operand_reload_insns, before_insn);
6130
6131 for (j = 0; j < reload_n_operands; j++)
6132 {
6133 emit_insns_before (output_address_reload_insns[j], following_insn);
6134 emit_insns_before (output_reload_insns[j], following_insn);
6135 }
6136
6137 /* Move death notes from INSN
6138 to output-operand-address and output reload insns. */
6139#ifdef PRESERVE_DEATH_INFO_REGNO_P
6140 {
6141 rtx insn1;
6142 /* Loop over those insns, last ones first. */
6143 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6144 insn1 = PREV_INSN (insn1))
6145 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6146 {
6147 rtx source = SET_SRC (PATTERN (insn1));
6148 rtx dest = SET_DEST (PATTERN (insn1));
6149
6150 /* The note we will examine next. */
6151 rtx reg_notes = REG_NOTES (insn);
6152 /* The place that pointed to this note. */
6153 rtx *prev_reg_note = &REG_NOTES (insn);
6154
6155 /* If the note is for something used in the source of this
6156 reload insn, or in the output address, move the note. */
6157 while (reg_notes)
6158 {
6159 rtx next_reg_notes = XEXP (reg_notes, 1);
6160 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6161 && GET_CODE (XEXP (reg_notes, 0)) == REG
6162 && ((GET_CODE (dest) != REG
6163 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6164 dest))
6165 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6166 source)))
6167 {
6168 *prev_reg_note = next_reg_notes;
6169 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6170 REG_NOTES (insn1) = reg_notes;
6171 }
6172 else
6173 prev_reg_note = &XEXP (reg_notes, 1);
6174
6175 reg_notes = next_reg_notes;
6176 }
6177 }
6178 }
6179#endif
6180
6181 /* For all the spill regs newly reloaded in this instruction,
6182 record what they were reloaded from, so subsequent instructions
6183 can inherit the reloads.
6184
6185 Update spill_reg_store for the reloads of this insn.
6186 Copy the elements that were updated in the loop above. */
6187
6188 for (j = 0; j < n_reloads; j++)
6189 {
6190 register int r = reload_order[j];
6191 register int i = reload_spill_index[r];
6192
6193 /* I is nonneg if this reload used one of the spill regs.
6194 If reload_reg_rtx[r] is 0, this is an optional reload
6195 that we opted to ignore.
6196
6197 Also ignore reloads that don't reach the end of the insn,
6198 since we will eventually see the one that does. */
6199
6200 if (i >= 0 && reload_reg_rtx[r] != 0
6201 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6202 reload_when_needed[r]))
6203 {
6204 /* First, clear out memory of what used to be in this spill reg.
6205 If consecutive registers are used, clear them all. */
6206 int nr
6207 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6208 int k;
6209
6210 for (k = 0; k < nr; k++)
6211 {
6212 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6213 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6214 }
6215
6216 /* Maybe the spill reg contains a copy of reload_out. */
6217 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6218 {
6219 register int nregno = REGNO (reload_out[r]);
6220 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6221 : HARD_REGNO_NREGS (nregno,
6222 GET_MODE (reload_reg_rtx[r])));
6223
6224 spill_reg_store[i] = new_spill_reg_store[i];
6225 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6226
6227 /* If NREGNO is a hard register, it may occupy more than
6228 one register. If it does, say what is in the
6229 rest of the registers assuming that both registers
6230 agree on how many words the object takes. If not,
6231 invalidate the subsequent registers. */
6232
6233 if (nregno < FIRST_PSEUDO_REGISTER)
6234 for (k = 1; k < nnr; k++)
6235 reg_last_reload_reg[nregno + k]
6236 = (nr == nnr ? gen_rtx (REG, word_mode,
6237 REGNO (reload_reg_rtx[r]) + k)
6238 : 0);
6239
6240 /* Now do the inverse operation. */
6241 for (k = 0; k < nr; k++)
6242 {
6243 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6244 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6245 : nregno + k);
6246 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6247 }
6248 }
6249
6250 /* Maybe the spill reg contains a copy of reload_in. Only do
6251 something if there will not be an output reload for
6252 the register being reloaded. */
6253 else if (reload_out[r] == 0
6254 && reload_in[r] != 0
6255 && ((GET_CODE (reload_in[r]) == REG
6256 && ! reg_has_output_reload[REGNO (reload_in[r])]
6257 || (GET_CODE (reload_in_reg[r]) == REG
6258 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6259 {
6260 register int nregno;
6261 int nnr;
6262
6263 if (GET_CODE (reload_in[r]) == REG)
6264 nregno = REGNO (reload_in[r]);
6265 else
6266 nregno = REGNO (reload_in_reg[r]);
6267
6268 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6269 : HARD_REGNO_NREGS (nregno,
6270 GET_MODE (reload_reg_rtx[r])));
6271
6272 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6273
6274 if (nregno < FIRST_PSEUDO_REGISTER)
6275 for (k = 1; k < nnr; k++)
6276 reg_last_reload_reg[nregno + k]
6277 = (nr == nnr ? gen_rtx (REG, word_mode,
6278 REGNO (reload_reg_rtx[r]) + k)
6279 : 0);
6280
6281 /* Unless we inherited this reload, show we haven't
6282 recently done a store. */
6283 if (! reload_inherited[r])
6284 spill_reg_store[i] = 0;
6285
6286 for (k = 0; k < nr; k++)
6287 {
6288 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6289 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6290 : nregno + k);
6291 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6292 = insn;
6293 }
6294 }
6295 }
6296
6297 /* The following if-statement was #if 0'd in 1.34 (or before...).
6298 It's reenabled in 1.35 because supposedly nothing else
6299 deals with this problem. */
6300
6301 /* If a register gets output-reloaded from a non-spill register,
6302 that invalidates any previous reloaded copy of it.
6303 But forget_old_reloads_1 won't get to see it, because
6304 it thinks only about the original insn. So invalidate it here. */
6305 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6306 {
6307 register int nregno = REGNO (reload_out[r]);
6308 reg_last_reload_reg[nregno] = 0;
6309 }
6310 }
6311}
6312\f
6313/* Emit code to perform an input reload of IN to RELOADREG. IN is from
6314 operand OPNUM with reload type TYPE.
6315
6316 Returns first insn emitted. */
6317
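/* A typical call, as made from emit_reload_insns above:

	gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
			  reload_when_needed[j]);

   where RELOADREG is the hard register chosen by choose_reload_regs and
   OLDEQUIV is the value (or an equivalent register) to load into it.  */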
6318rtx
6319gen_input_reload (reloadreg, in, opnum, type)
6320 rtx reloadreg;
6321 rtx in;
6322 int opnum;
6323 enum reload_type type;
6324{
6325 rtx last = get_last_insn ();
6326
6327 /* How to do this reload can get quite tricky. Normally, we are being
6328 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6329 register that didn't get a hard register. In that case we can just
6330 call emit_move_insn.
6331
6332 We can also be asked to reload a PLUS that adds either two registers, or
6333 a register and a constant or MEM, or a MEM and a constant. This can
6334 occur during frame pointer elimination and while reloading addresses.
6335 This case is handled by trying to emit a single insn
6336 to perform the add. If it is not valid, we use a two insn sequence.
6337
6338 Finally, we could be called to handle an 'o' constraint by putting
6339 an address into a register. In that case, we first try to do this
6340 with a named pattern of "reload_load_address". If no such pattern
6341 exists, we just emit a SET insn and hope for the best (it will normally
6342 be valid on machines that use 'o').
6343
6344 This entire process is made complex because reload will never
6345 process the insns we generate here and so we must ensure that
6346 they will fit their constraints and also by the fact that parts of
6347 IN might be being reloaded separately and replaced with spill registers.
6348 Because of this, we are, in some sense, just guessing the right approach
6349 here. The one listed above seems to work.
6350
6351 ??? At some point, this whole thing needs to be rethought. */
6352
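  /* For example (illustrative RTL only): to reload
	 (plus:SI (reg:SI 6) (const_int 64))
     into (reg:SI 2), we first try the single insn
	 (set (reg:SI 2) (plus:SI (reg:SI 6) (const_int 64)))
     and if it is not recognized or fails constrain_operands, we fall back to
	 (set (reg:SI 2) (const_int 64))
     followed by an add of (reg:SI 6) into (reg:SI 2), as emitted by
     gen_move_insn and gen_add2_insn below.  */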
6353 if (GET_CODE (in) == PLUS
6354 && ((GET_CODE (XEXP (in, 0)) == REG
6355 && (GET_CODE (XEXP (in, 1)) == REG
6356 || CONSTANT_P (XEXP (in, 1))
6357 || GET_CODE (XEXP (in, 1)) == MEM))
6358 || (GET_CODE (XEXP (in, 0)) == MEM
6359 && CONSTANT_P (XEXP (in, 1)))))
6360 {
6361 /* We need to compute the sum of what is either a register and a
6362 constant, a register and memory, a hard register and a pseudo
6363 register, or memory and a constant and put it into the reload
6364 register. The best possible way of doing this is if the machine
6365 has a three-operand ADD insn that accepts the required operands.
6366
6367 The simplest approach is to try to generate such an insn and see if it
6368 is recognized and matches its constraints. If so, it can be used.
6369
6370 It might be better not to actually emit the insn unless it is valid,
6371 but we need to pass the insn as an operand to `recog' and
6372 `insn_extract' and it is simpler to emit and then delete the insn if
6373 not valid than to dummy things up. */
6374
6375 rtx op0, op1, tem, insn;
6376 int code;
6377
6378 op0 = find_replacement (&XEXP (in, 0));
6379 op1 = find_replacement (&XEXP (in, 1));
6380
6381 /* Since constraint checking is strict, commutativity won't be
6382 checked, so we need to do that here to avoid spurious failure
6383 if the add instruction is two-address and the second operand
6384 of the add is the same as the reload reg, which is frequently
6385 the case. If the insn would be A = B + A, rearrange it so
6386 it will be A = A + B as constrain_operands expects. */
6387
6388 if (GET_CODE (XEXP (in, 1)) == REG
6389 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6390 tem = op0, op0 = op1, op1 = tem;
6391
6392 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6393 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6394
6395 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6396 code = recog_memoized (insn);
6397
6398 if (code >= 0)
6399 {
6400 insn_extract (insn);
6401 /* We want constrain_operands to treat this insn strictly in
6402 its validity determination, i.e., the way it would after reload
6403 has completed. */
6404 if (constrain_operands (code, 1))
6405 return insn;
6406 }
6407
6408 delete_insns_since (last);
6409
6410 /* If that failed, we must use a conservative two-insn sequence.
6411 Use move to copy the constant, MEM, or pseudo register to the reload
6412 register since "move" will be able to handle an arbitrary operand,
6413 unlike add which can't, in general. Then add the registers.
6414
6415 If there is another way to do this for a specific machine, a
6416 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6417 we emit below. */
6418
6419 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6420 || (GET_CODE (op1) == REG
6421 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6422 tem = op0, op0 = op1, op1 = tem;
6423
6424 emit_insn (gen_move_insn (reloadreg, op0));
6425
6426 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6427 This fixes a problem on the 32K where the stack pointer cannot
6428 be used as an operand of an add insn. */
6429
6430 if (rtx_equal_p (op0, op1))
6431 op1 = reloadreg;
6432
6433 emit_insn (gen_add2_insn (reloadreg, op1));
6434 }
6435
6436#ifdef SECONDARY_MEMORY_NEEDED
6437 /* If we need a memory location to do the move, do it that way. */
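 /* For example, on a machine where a copy between the register classes of
 IN and RELOADREG has no direct move pattern (say, general registers to
 floating-point registers), this emits
 (set (mem ...) (reg IN))   then   (set (reg RELOADREG) (mem ...))
 through the secondary-memory location returned by get_secondary_mem. */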
6438 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6439 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6440 REGNO_REG_CLASS (REGNO (reloadreg)),
6441 GET_MODE (reloadreg)))
6442 {
6443 /* Get the memory to use and rewrite both registers to its mode. */
6444 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6445
6446 if (GET_MODE (loc) != GET_MODE (reloadreg))
6447 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6448
6449 if (GET_MODE (loc) != GET_MODE (in))
6450 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6451
6452 emit_insn (gen_move_insn (loc, in));
6453 emit_insn (gen_move_insn (reloadreg, loc));
6454 }
6455#endif
6456
6457 /* If IN is a simple operand, use gen_move_insn. */
6458 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6459 emit_insn (gen_move_insn (reloadreg, in));
6460
6461#ifdef HAVE_reload_load_address
6462 else if (HAVE_reload_load_address)
6463 emit_insn (gen_reload_load_address (reloadreg, in));
6464#endif
6465
 6466 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6467 else
6468 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6469
 6470 /* Return the first insn emitted.
 6471 We cannot just return get_last_insn, because there may have
 6472 been multiple instructions emitted. Also note that gen_move_insn may
 6473 emit more than one insn itself, so we cannot assume that there is one
 6474 insn emitted per emit_insn_before call. */
6475
6476 return last ? NEXT_INSN (last) : get_insns ();
6477}
6478\f
6479/* Delete a previously made output-reload
6480 whose result we now believe is not needed.
6481 First we double-check.
6482
6483 INSN is the insn now being processed.
6484 OUTPUT_RELOAD_INSN is the insn of the output reload.
6485 J is the reload-number for this insn. */
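 /* Typical scenario (for illustration): the pseudo was spilled, and
 OUTPUT_RELOAD_INSN stored a reload register back into the pseudo's home.
 If the pseudo is never referenced again before INSN and no label or jump
 intervenes, the value reaches INSN through the reload register alone, so
 that store is dead and can be deleted. */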
6486
6487static void
6488delete_output_reload (insn, j, output_reload_insn)
6489 rtx insn;
6490 int j;
6491 rtx output_reload_insn;
6492{
6493 register rtx i1;
6494
6495 /* Get the raw pseudo-register referred to. */
6496
6497 rtx reg = reload_in[j];
6498 while (GET_CODE (reg) == SUBREG)
6499 reg = SUBREG_REG (reg);
6500
6501 /* If the pseudo-reg we are reloading is no longer referenced
6502 anywhere between the store into it and here,
6503 and no jumps or labels intervene, then the value can get
6504 here through the reload reg alone.
6505 Otherwise, give up--return. */
6506 for (i1 = NEXT_INSN (output_reload_insn);
6507 i1 != insn; i1 = NEXT_INSN (i1))
6508 {
6509 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6510 return;
6511 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6512 && reg_mentioned_p (reg, PATTERN (i1)))
6513 return;
6514 }
6515
6516 if (cannot_omit_stores[REGNO (reg)])
6517 return;
6518
6519 /* If this insn will store in the pseudo again,
6520 the previous store can be removed. */
6521 if (reload_out[j] == reload_in[j])
6522 delete_insn (output_reload_insn);
6523
6524 /* See if the pseudo reg has been completely replaced
6525 with reload regs. If so, delete the store insn
6526 and forget we had a stack slot for the pseudo. */
6527 else if (reg_n_deaths[REGNO (reg)] == 1
6528 && reg_basic_block[REGNO (reg)] >= 0
6529 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6530 {
6531 rtx i2;
6532
6533 /* We know that it was used only between here
6534 and the beginning of the current basic block.
6535 (We also know that the last use before INSN was
6536 the output reload we are thinking of deleting, but never mind that.)
6537 Search that range; see if any ref remains. */
6538 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6539 {
6540 rtx set = single_set (i2);
6541
6542 /* Uses which just store in the pseudo don't count,
6543 since if they are the only uses, they are dead. */
6544 if (set != 0 && SET_DEST (set) == reg)
6545 continue;
6546 if (GET_CODE (i2) == CODE_LABEL
6547 || GET_CODE (i2) == JUMP_INSN)
6548 break;
6549 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6550 && reg_mentioned_p (reg, PATTERN (i2)))
6551 /* Some other ref remains;
6552 we can't do anything. */
6553 return;
6554 }
6555
6556 /* Delete the now-dead stores into this pseudo. */
6557 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6558 {
6559 rtx set = single_set (i2);
6560
6561 if (set != 0 && SET_DEST (set) == reg)
6562 delete_insn (i2);
6563 if (GET_CODE (i2) == CODE_LABEL
6564 || GET_CODE (i2) == JUMP_INSN)
6565 break;
6566 }
6567
6568 /* For the debugging info,
6569 say the pseudo lives in this reload reg. */
6570 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6571 alter_reg (REGNO (reg), -1);
6572 }
6573}
6574\f
6575/* Output reload-insns to reload VALUE into RELOADREG.
6576 VALUE is an autoincrement or autodecrement RTX whose operand
6577 is a register or memory location;
6578 so reloading involves incrementing that location.
6579
6580 INC_AMOUNT is the number to increment or decrement by (always positive).
6581 This cannot be deduced from VALUE. */
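 /* For example (hypothetical rtl): with VALUE = (post_inc (reg 14)) and
 INC_AMOUNT = 4, we copy the current contents of (reg 14) into RELOADREG
 and then add 4 to (reg 14); with VALUE = (pre_dec (reg 14)) we subtract 4
 from (reg 14) first and RELOADREG ends up holding the decremented value. */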
6582
6583static void
6584inc_for_reload (reloadreg, value, inc_amount)
6585 rtx reloadreg;
6586 rtx value;
6587 int inc_amount;
6588{
6589 /* REG or MEM to be copied and incremented. */
6590 rtx incloc = XEXP (value, 0);
6591 /* Nonzero if increment after copying. */
6592 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6593 rtx last;
6594 rtx inc;
6595 rtx add_insn;
6596 int code;
6597
6598 /* No hard register is equivalent to this register after
6599 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6600 we could inc/dec that register as well (maybe even using it for
6601 the source), but I'm not sure it's worth worrying about. */
6602 if (GET_CODE (incloc) == REG)
6603 reg_last_reload_reg[REGNO (incloc)] = 0;
6604
6605 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6606 inc_amount = - inc_amount;
6607
6608 inc = GEN_INT (inc_amount);
6609
6610 /* If this is post-increment, first copy the location to the reload reg. */
6611 if (post)
6612 emit_insn (gen_move_insn (reloadreg, incloc));
6613
6614 /* See if we can directly increment INCLOC. Use a method similar to that
6615 in gen_input_reload. */
6616
6617 last = get_last_insn ();
6618 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6619 gen_rtx (PLUS, GET_MODE (incloc),
6620 incloc, inc)));
6621
6622 code = recog_memoized (add_insn);
6623 if (code >= 0)
6624 {
6625 insn_extract (add_insn);
6626 if (constrain_operands (code, 1))
6627 {
6628 /* If this is a pre-increment and we have incremented the value
6629 where it lives, copy the incremented value to RELOADREG to
6630 be used as an address. */
6631
6632 if (! post)
6633 emit_insn (gen_move_insn (reloadreg, incloc));
6634
6635 return;
6636 }
6637 }
6638
6639 delete_insns_since (last);
6640
 6641 /* If we couldn't do the increment directly, we must do it in RELOADREG.
 6642 The way we do this depends on whether this is pre- or post-increment.
 6643 For pre-increment, copy INCLOC to the reload register, increment it
 6644 there, then save it back. */
6645
6646 if (! post)
6647 {
6648 emit_insn (gen_move_insn (reloadreg, incloc));
6649 emit_insn (gen_add2_insn (reloadreg, inc));
6650 emit_insn (gen_move_insn (incloc, reloadreg));
6651 }
6652 else
6653 {
 6654 /* Post-increment.
 6655 Because this might be a jump insn or a compare, and because RELOADREG
 6656 may not be available after the insn in an input reload, we must do
 6657 the incrementation before the insn we are reloading for.
6658
6659 We have already copied INCLOC to RELOADREG. Increment the copy in
6660 RELOADREG, save that back, then decrement RELOADREG so it has
6661 the original value. */
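 /* Roughly, the emitted sequence is (how gen_add2_insn expands is
 machine-dependent):
 (set RELOADREG (plus RELOADREG INC))
 (set INCLOC RELOADREG)
 (set RELOADREG (plus RELOADREG (-INC)))
 leaving INCLOC incremented while RELOADREG keeps the original value. */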
6662
6663 emit_insn (gen_add2_insn (reloadreg, inc));
6664 emit_insn (gen_move_insn (incloc, reloadreg));
6665 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
6666 }
6667
6668 return;
6669}
6670\f
6671/* Return 1 if we are certain that the constraint-string STRING allows
6672 the hard register REG. Return 0 if we can't be sure of this. */
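 /* For example, given STRING "r,m" and a general hard register REG, the "m"
 alternative does not obviously allow a register, so the result is 0 even
 though "r" does; given "r,g", every alternative allows a general register,
 so the result is 1. (This assumes REG_CLASS_FROM_LETTER maps non-register
 constraint letters to a class not containing REG, as is usual.) */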
6673
6674static int
6675constraint_accepts_reg_p (string, reg)
6676 char *string;
6677 rtx reg;
6678{
6679 int value = 0;
6680 int regno = true_regnum (reg);
6681 int c;
6682
6683 /* Initialize for first alternative. */
6684 value = 0;
 6685 /* Check that each alternative allows REG, via `g', `r', or a register-class letter. */
6686 while (1)
6687 switch (c = *string++)
6688 {
6689 case 0:
 6690 /* End of string: we win only if the final alternative also accepted REG. */
6691 return value;
6692 case ',':
 6693 /* If the previous alternative did not accept REG, we lose. */
6694 if (value == 0)
6695 return 0;
6696 /* Initialize for next alternative. */
6697 value = 0;
6698 break;
6699 case 'g':
6700 case 'r':
6701 /* Any general reg wins for this alternative. */
6702 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6703 value = 1;
6704 break;
6705 default:
6706 /* Any reg in specified class wins for this alternative. */
6707 {
6708 enum reg_class class = REG_CLASS_FROM_LETTER (c);
6709
6710 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
6711 value = 1;
6712 }
6713 }
6714}
6715\f
6716/* Return the number of places FIND appears within X, but don't count
6717 an occurrence if some SET_DEST is FIND. */
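 /* Occurrences are detected by pointer equality (X == FIND), so FIND must be
 the shared rtx actually contained in X. For example (hypothetical), if R
 is the rtx used for (reg 5) in (set R (plus R (const_int 1))), the result
 is 1: the SET_DEST occurrence is skipped and only the use inside the
 source counts. */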
6718
6719static int
6720count_occurrences (x, find)
6721 register rtx x, find;
6722{
6723 register int i, j;
6724 register enum rtx_code code;
6725 register char *format_ptr;
6726 int count;
6727
6728 if (x == find)
6729 return 1;
6730 if (x == 0)
6731 return 0;
6732
6733 code = GET_CODE (x);
6734
6735 switch (code)
6736 {
6737 case REG:
6738 case QUEUED:
6739 case CONST_INT:
6740 case CONST_DOUBLE:
6741 case SYMBOL_REF:
6742 case CODE_LABEL:
6743 case PC:
6744 case CC0:
6745 return 0;
6746
6747 case SET:
6748 if (SET_DEST (x) == find)
6749 return count_occurrences (SET_SRC (x), find);
6750 break;
6751 }
6752
6753 format_ptr = GET_RTX_FORMAT (code);
6754 count = 0;
6755
6756 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6757 {
6758 switch (*format_ptr++)
6759 {
6760 case 'e':
6761 count += count_occurrences (XEXP (x, i), find);
6762 break;
6763
6764 case 'E':
6765 if (XVEC (x, i) != NULL)
6766 {
6767 for (j = 0; j < XVECLEN (x, i); j++)
6768 count += count_occurrences (XVECEXP (x, i, j), find);
6769 }
6770 break;
6771 }
6772 }
6773 return count;
6774}