/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.
   LOG_LINKS does not have links for uses of CC0; none are needed,
   because the insn that sets CC0 is always immediately before the
   insn that tests it.  So we always regard a branch insn as having
   a logical link to the preceding insn.  The same is true for an
   insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.
   There are a few exceptions where the dataflow information created by
   flow.c is not completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
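
/* As an illustrative example (the register numbers here are made up),
   given the linked pair

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (mem:SI (reg:SI 100)) (const_int 0))

   where reg 100 dies in the second insn, substitution produces

	(set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (const_int 0))

   which is installed, and the first insn deleted, only if the machine
   description recognizes the result as a valid insn.  */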

#include "config.h"
#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include <stdio.h>

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* If byte loads either zero- or sign- extend, define BYTE_LOADS_EXTEND
   for cases when we don't care which is true.  Define LOAD_EXTEND to
   be ZERO_EXTEND or SIGN_EXTEND, depending on which was defined.  */

#ifdef BYTE_LOADS_ZERO_EXTEND
#define BYTE_LOADS_EXTEND
#define LOAD_EXTEND ZERO_EXTEND
#endif

#ifdef BYTE_LOADS_SIGN_EXTEND
#define BYTE_LOADS_EXTEND
#define LOAD_EXTEND SIGN_EXTEND
#endif
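
/* For example (an illustrative note): on a machine defining
   BYTE_LOADS_ZERO_EXTEND, a QImode load is known to leave the upper
   bits of the register zero, so a subsequent
   (zero_extend:SI (mem:QI ...)) of the same value adds nothing and can
   be simplified away.  */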

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
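
/* For instance, whether insn A precedes insn B in the insn stream can be
   tested with INSN_CUID (A) < INSN_CUID (B) even when their uids are out
   of order (an illustrative note; A and B stand for any two insns).  */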

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				other register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
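
/* For instance (an illustrative sketch; the register numbers are made up):
   if reg 66 was last set by

	(set (reg:SI 66) (and:SI (reg:SI 65) (const_int 255)))

   and that entry is still valid by the rules above, then a later
   (and:SI (reg:SI 66) (const_int 255)) adds no information and can be
   simplified to just (reg:SI 66).  */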

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */
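
/* As an illustrative example, on such a byte-load machine a QImode pseudo
   that is loaded from memory at every one of its sets would get a
   reg_nonzero_bits entry of 0xff: its upper bits are known to be zero
   everywhere in the function.  */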

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  The
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx rtx; int i;} old_contents;
  union {rtx *rtx; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);					\
      if (undobuf.num_undo < MAX_UNDO)				\
	{							\
	  undobuf.undo[undobuf.num_undo].is_int = 0;		\
	  undobuf.undo[undobuf.num_undo].where.rtx = &INTO;	\
	  undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO; \
	  INTO = _new;						\
	  if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO) \
	    undobuf.num_undo++;					\
	}							\
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)				\
	{							\
	  undobuf.undo[undobuf.num_undo].is_int = 1;		\
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;	\
	  INTO = NEWVAL;					\
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
	    undobuf.num_undo++;					\
	}							\
    } while (0)
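
/* A typical use of SUBST is (an illustrative sketch; `pat' and `new_src'
   are made-up names):

	SUBST (SET_SRC (pat), new_src);

   The old contents of SET_SRC (pat) are saved in undobuf, so if the
   rebuilt insn is later rejected, undo_all can walk the recorded undos
   backwards and restore the original rtl.  */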

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void set_nonzero_bits_and_sign_copies ();
static void setup_incoming_promotions ();
static void move_deaths ();
rtx remove_death ();
static void record_value_for_reg ();
static void record_dead_and_set_regs ();
static int use_crosses_set_p ();
static rtx try_combine ();
static rtx *find_split_point ();
static rtx subst ();
static void undo_all ();
static int reg_dead_at_p ();
static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx known_cond ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
static rtx simplify_and_const_int ();
static unsigned HOST_WIDE_INT nonzero_bits ();
static int num_sign_bit_copies ();
static int merge_outer_ops ();
static rtx simplify_shift_const ();
static int recog_for_combine ();
static rtx gen_lowpart_for_combine ();
static rtx gen_rtx_combine ();
static rtx gen_binary ();
static rtx gen_unary ();
static enum rtx_code simplify_comparison ();
static int reversible_comparison_p ();
static int get_last_value_validate ();
static rtx get_last_value ();
static void distribute_notes ();
static void distribute_links ();
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (int));
  bzero (reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero (reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
  bzero (reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.
     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (int));
  bzero (reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));

  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_CODE (insn) == INSN
	       || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      /* If this register is undefined at the start of the function, we can't
	 say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
	    & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't install a subreg involving two modes not tieable.
	 It can worsen register allocation, and can even make invalid reload
	 insns, since the reg inside may need to be copied from in the
	 outside mode, and that may be invalid if it is an fp reg copied in
	 integer mode.  As a special exception, we can allow this if
	 I3 is simply copying DEST, a REG, to CC0.  */
      || (GET_CODE (src) == SUBREG
	  && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
#ifdef HAVE_cc0
	  && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
		&& SET_DEST (PATTERN (i3)) == cc0_rtx
		&& GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
#endif
	  )
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  Also, don't move a volatile asm or UNSPEC_VOLATILE across
	 any other insns.  */
      || (! all_adjacent
	  && (use_crosses_set_p (src, INSN_CUID (insn))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing
   it entirely, we can't allow the output to contain I2DEST or I1DEST, as
   doing so would produce an insn that is not equivalent to the original
   insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )

	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	  (parallel [(set (mem (reg 69)) ...)
		     (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	      || (i1 != 0
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	{
	  undo_all ();
	  return 0;
	}
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
1414 if (i1)
1415 {
1416 subst_low_cuid = INSN_CUID (i1);
1417 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1418 }
1419 else
1420 {
1421 subst_low_cuid = INSN_CUID (i2);
1422 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1423 }
1424
1425 previous_num_undos = undobuf.num_undo;
1426 }
1427
1428#ifndef HAVE_cc0
1429 /* Many machines that don't use CC0 have insns that can both perform an
1430 arithmetic operation and set the condition code. These operations will
1431 be represented as a PARALLEL with the first element of the vector
1432 being a COMPARE of an arithmetic operation with the constant zero.
1433 The second element of the vector will set some pseudo to the result
1434 of the same arithmetic operation. If we simplify the COMPARE, we won't
1435 match such a pattern and so will generate an extra insn. Here we test
1436 for this case, where both the comparison and the operation result are
1437 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1438 I2SRC. Later we will make the PARALLEL that contains I2. */
1439
1440 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1441 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1442 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1443 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1444 {
1445 rtx *cc_use;
1446 enum machine_mode compare_mode;
1447
1448 newpat = PATTERN (i3);
1449 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1450
1451 i2_is_used = 1;
1452
1453#ifdef EXTRA_CC_MODES
1454 /* See if a COMPARE with the operand we substituted in should be done
1455 with the mode that is currently being used. If not, do the same
1456 processing we do in `subst' for a SET; namely, if the destination
1457 is used only once, try to replace it with a register of the proper
1458 mode and also replace the COMPARE. */
1459 if (undobuf.other_insn == 0
1460 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1461 &undobuf.other_insn))
1462 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1463 i2src, const0_rtx))
1464 != GET_MODE (SET_DEST (newpat))))
1465 {
1466 int regno = REGNO (SET_DEST (newpat));
1467 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1468
1469 if (regno < FIRST_PSEUDO_REGISTER
1470 || (reg_n_sets[regno] == 1 && ! added_sets_2
1471 && ! REG_USERVAR_P (SET_DEST (newpat))))
1472 {
1473 if (regno >= FIRST_PSEUDO_REGISTER)
1474 SUBST (regno_reg_rtx[regno], new_dest);
1475
1476 SUBST (SET_DEST (newpat), new_dest);
1477 SUBST (XEXP (*cc_use, 0), new_dest);
1478 SUBST (SET_SRC (newpat),
1479 gen_rtx_combine (COMPARE, compare_mode,
1480 i2src, const0_rtx));
1481 }
1482 else
1483 undobuf.other_insn = 0;
1484 }
1485#endif
1486 }
1487 else
1488#endif
1489 {
1490 n_occurrences = 0; /* `subst' counts here */
1491
1492 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1493 need to make a unique copy of I2SRC each time we substitute it
1494 to avoid self-referential rtl. */
1495
1496 subst_low_cuid = INSN_CUID (i2);
1497 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1498 ! i1_feeds_i3 && i1dest_in_i1src);
1499 previous_num_undos = undobuf.num_undo;
1500
1501 /* Record whether i2's body now appears within i3's body. */
1502 i2_is_used = n_occurrences;
1503 }
1504
1505 /* If we already got a failure, don't try to do more. Otherwise,
1506 try to substitute in I1 if we have it. */
1507
1508 if (i1 && GET_CODE (newpat) != CLOBBER)
1509 {
1510 /* Before we can do this substitution, we must redo the test done
1511 above (see detailed comments there) that ensures that I1DEST
1512 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1513
1514 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1515 0, NULL_PTR))
1516 {
1517 undo_all ();
1518 return 0;
1519 }
1520
1521 n_occurrences = 0;
1522 subst_low_cuid = INSN_CUID (i1);
1523 newpat = subst (newpat, i1dest, i1src, 0, 0);
1524 previous_num_undos = undobuf.num_undo;
1525 }
1526
1527 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1528 to count all the ways that I2SRC and I1SRC can be used. */
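/* Example of the hazard being tested for (hypothetical rtl): if I2SRC
   contains an auto-increment such as (mem:SI (post_inc:SI (reg:SI 70)))
   and ends up appearing twice in the combined pattern, the pointer
   register would be incremented twice instead of once, so we must give
   up on the combination. */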
1529 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1530 && i2_is_used + added_sets_2 > 1)
1531 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1532 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1533 > 1))
1534 /* Fail if we tried to make a new register (we used to abort, but there's
1535 really no reason to). */
1536 || max_reg_num () != maxreg
1537 /* Fail if we couldn't do something and have a CLOBBER. */
1538 || GET_CODE (newpat) == CLOBBER)
1539 {
1540 undo_all ();
1541 return 0;
1542 }
1543
1544 /* If the actions of the earlier insns must be kept
1545 in addition to substituting them into the latest one,
1546 we must make a new PARALLEL for the latest insn
1547 to hold the additional SETs. */
1548
1549 if (added_sets_1 || added_sets_2)
1550 {
1551 combine_extras++;
1552
1553 if (GET_CODE (newpat) == PARALLEL)
1554 {
1555 rtvec old = XVEC (newpat, 0);
1556 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1557 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1558 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1559 sizeof (old->elem[0]) * old->num_elem);
1560 }
1561 else
1562 {
1563 rtx old = newpat;
1564 total_sets = 1 + added_sets_1 + added_sets_2;
1565 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1566 XVECEXP (newpat, 0, 0) = old;
1567 }
1568
1569 if (added_sets_1)
1570 XVECEXP (newpat, 0, --total_sets)
1571 = (GET_CODE (PATTERN (i1)) == PARALLEL
1572 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1573
1574 if (added_sets_2)
1575 {
1576 /* If there is no I1, use I2's body as is. We used to also not do
1577 the subst call below if I2 was substituted into I3,
1578 but that could lose a simplification. */
1579 if (i1 == 0)
1580 XVECEXP (newpat, 0, --total_sets) = i2pat;
1581 else
1582 /* See comment where i2pat is assigned. */
1583 XVECEXP (newpat, 0, --total_sets)
1584 = subst (i2pat, i1dest, i1src, 0, 0);
1585 }
1586 }
1587
1588 /* We come here when we are replacing a destination in I2 with the
1589 destination of I3. */
1590 validate_replacement:
1591
1592 /* Is the result of combination a valid instruction? */
1593 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1594
1595 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1596 the second SET's destination is a register that is unused. In that case,
1597 we just need the first SET. This can occur when simplifying a divmod
1598 insn. We *must* test for this case here because the code below that
1599 splits two independent SETs doesn't handle this case correctly when it
1600 updates the register status. Also check the case where the first
1601 SET's destination is unused. That would not cause incorrect code, but
1602 does cause an unneeded insn to remain. */
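/* For example (a hypothetical divmod pattern): if NEWPAT is
       (parallel [(set (reg:SI 60) (div:SI (reg:SI 58) (reg:SI 59)))
                  (set (reg:SI 61) (mod:SI (reg:SI 58) (reg:SI 59)))])
   and I3 has a REG_UNUSED note for (reg:SI 61), the remainder is dead
   and recognizing the first SET alone is sufficient. */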
1603
1604 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1605 && XVECLEN (newpat, 0) == 2
1606 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1607 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1608 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1609 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1610 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1611 && asm_noperands (newpat) < 0)
1612 {
1613 newpat = XVECEXP (newpat, 0, 0);
1614 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1615 }
1616
1617 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1618 && XVECLEN (newpat, 0) == 2
1619 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1620 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1621 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1622 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1623 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1624 && asm_noperands (newpat) < 0)
1625 {
1626 newpat = XVECEXP (newpat, 0, 1);
1627 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1628 }
1629
1630 /* See if this is an XOR. If so, perhaps the problem is that the
1631 constant is out of range. Replace it with a complemented XOR with
1632 a complemented constant; it might be in range. */
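/* For example, in SImode (hypothetical constants):
       (set (reg:SI 60) (xor:SI (reg:SI 59) (const_int -16)))
   is equivalent to
       (set (reg:SI 60) (not:SI (xor:SI (reg:SI 59) (const_int 15))))
   because (not (xor X D)) is (xor X (not D)); the complemented constant
   15 may be valid as an operand where -16 (0xfffffff0) was not. */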
1633
1634 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1635 && GET_CODE (SET_SRC (newpat)) == XOR
1636 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1637 && ((temp = simplify_unary_operation (NOT,
1638 GET_MODE (SET_SRC (newpat)),
1639 XEXP (SET_SRC (newpat), 1),
1640 GET_MODE (SET_SRC (newpat))))
1641 != 0))
1642 {
1643 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1644 rtx pat
1645 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1646 gen_unary (NOT, i_mode,
1647 gen_binary (XOR, i_mode,
1648 XEXP (SET_SRC (newpat), 0),
1649 temp)));
1650
1651 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1652 if (insn_code_number >= 0)
1653 newpat = pat;
1654 }
1655
1656 /* If we were combining three insns and the result is a simple SET
1657 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1658 insns. There are two ways to do this. It can be split using a
1659 machine-specific method (like when you have an addition of a large
1660 constant) or by combine in the function find_split_point. */
1661
1662 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1663 && asm_noperands (newpat) < 0)
1664 {
1665 rtx m_split, *split;
1666 rtx ni2dest = i2dest;
1667
1668 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1669 use I2DEST as a scratch register will help. In the latter case,
1670 convert I2DEST to the mode of the source of NEWPAT if we can. */
1671
1672 m_split = split_insns (newpat, i3);
1673
1674 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1675 inputs of NEWPAT. */
1676
1677 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1678 possible to try that as a scratch reg. This would require adding
1679 more code to make it work though. */
1680
1681 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1682 {
1683 /* If I2DEST is a hard register or the only use of a pseudo,
1684 we can change its mode. */
1685 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1686 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1687 && GET_CODE (i2dest) == REG
1688 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1689 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1690 && ! REG_USERVAR_P (i2dest))))
1691 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1692 REGNO (i2dest));
1693
1694 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1695 gen_rtvec (2, newpat,
1696 gen_rtx (CLOBBER,
1697 VOIDmode,
1698 ni2dest))),
1699 i3);
1700 }
1701
1702 if (m_split && GET_CODE (m_split) == SEQUENCE
1703 && XVECLEN (m_split, 0) == 2
1704 && (next_real_insn (i2) == i3
1705 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1706 INSN_CUID (i2))))
1707 {
1708 rtx i2set, i3set;
1709 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1710 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1711
1712 i3set = single_set (XVECEXP (m_split, 0, 1));
1713 i2set = single_set (XVECEXP (m_split, 0, 0));
1714
1715 /* In case we changed the mode of I2DEST, replace it in the
1716 pseudo-register table here. We can't do it above in case this
1717 code doesn't get executed and we do a split the other way. */
1718
1719 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1720 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1721
1722 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1723
1724 /* If I2 or I3 has multiple SETs, we won't know how to track
1725 register status, so don't use these insns. */
1726
1727 if (i2_code_number >= 0 && i2set && i3set)
1728 insn_code_number = recog_for_combine (&newi3pat, i3,
1729 &new_i3_notes);
1730
1731 if (insn_code_number >= 0)
1732 newpat = newi3pat;
1733
1734 /* It is possible that both insns now set the destination of I3.
1735 If so, we must show an extra use of it. */
1736
1737 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1738 && GET_CODE (SET_DEST (i2set)) == REG
1739 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1740 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1741 }
1742
1743 /* If we can split it and use I2DEST, go ahead and see if that
1744 helps things be recognized. Verify that none of the registers
1745 are set between I2 and I3. */
1746 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1747#ifdef HAVE_cc0
1748 && GET_CODE (i2dest) == REG
1749#endif
1750 /* We need I2DEST in the proper mode. If it is a hard register
1751 or the only use of a pseudo, we can change its mode. */
1752 && (GET_MODE (*split) == GET_MODE (i2dest)
1753 || GET_MODE (*split) == VOIDmode
1754 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1755 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1756 && ! REG_USERVAR_P (i2dest)))
1757 && (next_real_insn (i2) == i3
1758 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1759 /* We can't overwrite I2DEST if its value is still used by
1760 NEWPAT. */
1761 && ! reg_referenced_p (i2dest, newpat))
1762 {
1763 rtx newdest = i2dest;
1764
1765 /* Get NEWDEST as a register in the proper mode. We have already
1766 validated that we can do this. */
1767 if (GET_MODE (i2dest) != GET_MODE (*split)
1768 && GET_MODE (*split) != VOIDmode)
1769 {
1770 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1771
1772 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1773 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1774 }
1775
1776 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1777 an ASHIFT. This can occur if it was inside a PLUS and hence
1778 appeared to be a memory address. This is a kludge. */
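/* For example (hypothetical), (mult:SI (reg:SI 65) (const_int 8))
   found this way becomes (ashift:SI (reg:SI 65) (const_int 3)),
   which shift insns can match directly. */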
1779 if (GET_CODE (*split) == MULT
1780 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1781 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1782 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1783 XEXP (*split, 0), GEN_INT (i)));
1784
1785#ifdef INSN_SCHEDULING
1786 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1787 be written as a ZERO_EXTEND. */
1788 if (GET_CODE (*split) == SUBREG
1789 && GET_CODE (SUBREG_REG (*split)) == MEM)
1790 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1791 XEXP (*split, 0)));
1792#endif
1793
1794 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1795 SUBST (*split, newdest);
1796 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1797 if (i2_code_number >= 0)
1798 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1799 }
1800 }
1801
1802 /* Check for a case where we loaded from memory in a narrow mode and
1803 then sign extended it, but we need both registers. In that case,
1804 we have a PARALLEL with both loads from the same memory location.
1805 We can split this into a load from memory followed by a register-register
1806 copy. This saves at least one insn, more if register allocation can
1807 eliminate the copy. */
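/* For example (hypothetical modes and registers), NEWPAT here might be
       (parallel [(set (reg:SI 60)
                       (sign_extend:SI (mem:HI (reg:SI 70))))
                  (set (reg:HI 61) (mem:HI (reg:SI 70)))])
   which we rewrite as the extending load followed by something like
       (set (reg:HI 61) (subreg:HI (reg:SI 60) 0))
   so that the memory location is only read once. */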
1808
1809 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1810 && GET_CODE (newpat) == PARALLEL
1811 && XVECLEN (newpat, 0) == 2
1812 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1813 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1814 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1815 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1816 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1817 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1818 INSN_CUID (i2))
1819 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1820 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1821 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1822 SET_SRC (XVECEXP (newpat, 0, 1)))
1823 && ! find_reg_note (i3, REG_UNUSED,
1824 SET_DEST (XVECEXP (newpat, 0, 0))))
1825 {
1826 rtx ni2dest;
1827
1828 newi2pat = XVECEXP (newpat, 0, 0);
1829 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1830 newpat = XVECEXP (newpat, 0, 1);
1831 SUBST (SET_SRC (newpat),
1832 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1833 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1834 if (i2_code_number >= 0)
1835 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1836
1837 if (insn_code_number >= 0)
1838 {
1839 rtx insn;
1840 rtx link;
1841
1842 /* If we will be able to accept this, we have made a change to the
1843 destination of I3. This can invalidate a LOG_LINKS pointing
1844 to I3. No other part of combine.c makes such a transformation.
1845
1846 The new I3 will have a destination that was previously the
1847 destination of I1 or I2 and which was used in I2 or I3. Call
1848 distribute_links to make a LOG_LINK from the next use of
1849 that destination. */
1850
1851 PATTERN (i3) = newpat;
1852 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1853
1854 /* I3 now uses what used to be its destination and which is
1855 now I2's destination. That means we need a LOG_LINK from
1856 I3 to I2. But we used to have one, so we still will.
1857
1858 However, some later insn might be using I2's dest and have
1859 a LOG_LINK pointing at I3. We must remove this link.
1860 The simplest way to remove the link is to point it at I1,
1861 which we know will be a NOTE. */
1862
1863 for (insn = NEXT_INSN (i3);
1864 insn && GET_CODE (insn) != CODE_LABEL
1865 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1866 insn = NEXT_INSN (insn))
1867 {
1868 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1869 && reg_referenced_p (ni2dest, PATTERN (insn)))
1870 {
1871 for (link = LOG_LINKS (insn); link;
1872 link = XEXP (link, 1))
1873 if (XEXP (link, 0) == i3)
1874 XEXP (link, 0) = i1;
1875
1876 break;
1877 }
1878 }
1879 }
1880 }
1881
1882 /* Similarly, check for a case where we have a PARALLEL of two independent
1883 SETs but we started with three insns. In this case, we can do the sets
1884 as two separate insns. This case occurs when some SET allows two
1885 other insns to combine, but the destination of that SET is still live. */
1886
1887 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1888 && GET_CODE (newpat) == PARALLEL
1889 && XVECLEN (newpat, 0) == 2
1890 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1891 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1892 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1893 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1894 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1895 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1896 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1897 INSN_CUID (i2))
1898 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1899 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1900 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1901 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1902 XVECEXP (newpat, 0, 0))
1903 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1904 XVECEXP (newpat, 0, 1)))
1905 {
1906 newi2pat = XVECEXP (newpat, 0, 1);
1907 newpat = XVECEXP (newpat, 0, 0);
1908
1909 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1910 if (i2_code_number >= 0)
1911 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1912 }
1913
1914 /* If it still isn't recognized, fail and change things back the way they
1915 were. */
1916 if ((insn_code_number < 0
1917 /* Is the result a reasonable ASM_OPERANDS? */
1918 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1919 {
1920 undo_all ();
1921 return 0;
1922 }
1923
1924 /* If we had to change another insn, make sure it is valid also. */
1925 if (undobuf.other_insn)
1926 {
1927 rtx other_notes = REG_NOTES (undobuf.other_insn);
1928 rtx other_pat = PATTERN (undobuf.other_insn);
1929 rtx new_other_notes;
1930 rtx note, next;
1931
1932 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1933 &new_other_notes);
1934
1935 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1936 {
1937 undo_all ();
1938 return 0;
1939 }
1940
1941 PATTERN (undobuf.other_insn) = other_pat;
1942
1943 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1944 are still valid. Then add any non-duplicate notes added by
1945 recog_for_combine. */
1946 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1947 {
1948 next = XEXP (note, 1);
1949
1950 if (REG_NOTE_KIND (note) == REG_UNUSED
1951 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1952 {
1953 if (GET_CODE (XEXP (note, 0)) == REG)
1954 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1955
1956 remove_note (undobuf.other_insn, note);
1957 }
1958 }
1959
1960 for (note = new_other_notes; note; note = XEXP (note, 1))
1961 if (GET_CODE (XEXP (note, 0)) == REG)
1962 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1963
1964 distribute_notes (new_other_notes, undobuf.other_insn,
1965 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1966 }
1967
1968 /* We now know that we can do this combination. Merge the insns and
1969 update the status of registers and LOG_LINKS. */
1970
1971 {
1972 rtx i3notes, i2notes, i1notes = 0;
1973 rtx i3links, i2links, i1links = 0;
1974 rtx midnotes = 0;
1975 int all_adjacent = (next_real_insn (i2) == i3
1976 && (i1 == 0 || next_real_insn (i1) == i2));
1977 register int regno;
1978 /* Compute which registers we expect to eliminate. */
1979 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1980 ? 0 : i2dest);
1981 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1982
1983 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1984 clear them. */
1985 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1986 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1987 if (i1)
1988 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1989
1990 /* Ensure that we do not have something that should not be shared but
1991 occurs multiple times in the new insns. Check this by first
1992 resetting all the `used' flags and then copying anything that is shared. */
1993
1994 reset_used_flags (i3notes);
1995 reset_used_flags (i2notes);
1996 reset_used_flags (i1notes);
1997 reset_used_flags (newpat);
1998 reset_used_flags (newi2pat);
1999 if (undobuf.other_insn)
2000 reset_used_flags (PATTERN (undobuf.other_insn));
2001
2002 i3notes = copy_rtx_if_shared (i3notes);
2003 i2notes = copy_rtx_if_shared (i2notes);
2004 i1notes = copy_rtx_if_shared (i1notes);
2005 newpat = copy_rtx_if_shared (newpat);
2006 newi2pat = copy_rtx_if_shared (newi2pat);
2007 if (undobuf.other_insn)
2008 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
2009
2010 INSN_CODE (i3) = insn_code_number;
2011 PATTERN (i3) = newpat;
2012 if (undobuf.other_insn)
2013 INSN_CODE (undobuf.other_insn) = other_code_number;
2014
2015 /* We had one special case above where I2 had more than one set and
2016 we replaced a destination of one of those sets with the destination
2017 of I3. In that case, we have to update LOG_LINKS of insns later
2018 in this basic block. Note that this (expensive) case is rare. */
2019
2020 if (GET_CODE (PATTERN (i2)) == PARALLEL)
2021 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2022 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2023 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2024 && ! find_reg_note (i2, REG_UNUSED,
2025 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2026 {
2027 register rtx insn;
2028
2029 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
2030 {
2031 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2032 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
2033 if (XEXP (link, 0) == i2)
2034 XEXP (link, 0) = i3;
2035
2036 if (GET_CODE (insn) == CODE_LABEL
2037 || GET_CODE (insn) == JUMP_INSN)
2038 break;
2039 }
2040 }
2041
2042 LOG_LINKS (i3) = 0;
2043 REG_NOTES (i3) = 0;
2044 LOG_LINKS (i2) = 0;
2045 REG_NOTES (i2) = 0;
2046
2047 if (newi2pat)
2048 {
2049 INSN_CODE (i2) = i2_code_number;
2050 PATTERN (i2) = newi2pat;
2051 }
2052 else
2053 {
2054 PUT_CODE (i2, NOTE);
2055 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2056 NOTE_SOURCE_FILE (i2) = 0;
2057 }
2058
2059 if (i1)
2060 {
2061 LOG_LINKS (i1) = 0;
2062 REG_NOTES (i1) = 0;
2063 PUT_CODE (i1, NOTE);
2064 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2065 NOTE_SOURCE_FILE (i1) = 0;
2066 }
2067
2068 /* Get death notes for everything that is now used in either I3 or
2069 I2 and used to die in a previous insn. */
2070
2071 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2072 if (newi2pat)
2073 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2074
2075 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2076 if (i3notes)
2077 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2078 elim_i2, elim_i1);
2079 if (i2notes)
2080 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2081 elim_i2, elim_i1);
2082 if (i1notes)
2083 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2084 elim_i2, elim_i1);
2085 if (midnotes)
2086 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2087 elim_i2, elim_i1);
2088
2089 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2090 know these are REG_UNUSED and want them to go to the desired insn,
2091 so we always pass it as i3. We have not counted the notes in
2092 reg_n_deaths yet, so we need to do so now. */
2093
2094 if (newi2pat && new_i2_notes)
2095 {
2096 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2097 if (GET_CODE (XEXP (temp, 0)) == REG)
2098 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2099
2100 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2101 }
2102
2103 if (new_i3_notes)
2104 {
2105 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2106 if (GET_CODE (XEXP (temp, 0)) == REG)
2107 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2108
2109 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2110 }
2111
2112 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2113 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2114 Show an additional death due to the REG_DEAD note we make here. If
2115 we discard it in distribute_notes, we will decrement it again. */
2116
2117 if (i3dest_killed)
2118 {
2119 if (GET_CODE (i3dest_killed) == REG)
2120 reg_n_deaths[REGNO (i3dest_killed)]++;
2121
2122 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2123 NULL_RTX),
2124 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2125 NULL_RTX, NULL_RTX);
2126 }
2127
2128 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2129 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2130 we passed I3 in that case, it might delete I2. */
2131
2132 if (i2dest_in_i2src)
2133 {
2134 if (GET_CODE (i2dest) == REG)
2135 reg_n_deaths[REGNO (i2dest)]++;
2136
2137 if (newi2pat && reg_set_p (i2dest, newi2pat))
2138 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2139 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2140 else
2141 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2142 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2143 NULL_RTX, NULL_RTX);
2144 }
2145
2146 if (i1dest_in_i1src)
2147 {
2148 if (GET_CODE (i1dest) == REG)
2149 reg_n_deaths[REGNO (i1dest)]++;
2150
2151 if (newi2pat && reg_set_p (i1dest, newi2pat))
2152 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2153 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2154 else
2155 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2156 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2157 NULL_RTX, NULL_RTX);
2158 }
2159
2160 distribute_links (i3links);
2161 distribute_links (i2links);
2162 distribute_links (i1links);
2163
2164 if (GET_CODE (i2dest) == REG)
2165 {
2166 rtx link;
2167 rtx i2_insn = 0, i2_val = 0, set;
2168
2169 /* The insn that used to set this register doesn't exist, and
2170 this life of the register may not exist either. See if one of
2171 I3's links points to an insn that sets I2DEST. If it does,
2172 that is now the last known value for I2DEST. If we don't update
2173 this and I2 set the register to a value that depended on its old
2174 contents, we will get confused. If this insn is used, things
2175 will be set correctly in combine_instructions. */
2176
2177 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2178 if ((set = single_set (XEXP (link, 0))) != 0
2179 && rtx_equal_p (i2dest, SET_DEST (set)))
2180 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2181
2182 record_value_for_reg (i2dest, i2_insn, i2_val);
2183
2184 /* If the reg formerly set in I2 died only once and that was in I3,
2185 zero its use count so it won't make `reload' do any work. */
2186 if (! added_sets_2 && newi2pat == 0)
2187 {
2188 regno = REGNO (i2dest);
2189 reg_n_sets[regno]--;
2190 if (reg_n_sets[regno] == 0
2191 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2192 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2193 reg_n_refs[regno] = 0;
2194 }
2195 }
2196
2197 if (i1 && GET_CODE (i1dest) == REG)
2198 {
2199 rtx link;
2200 rtx i1_insn = 0, i1_val = 0, set;
2201
2202 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2203 if ((set = single_set (XEXP (link, 0))) != 0
2204 && rtx_equal_p (i1dest, SET_DEST (set)))
2205 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2206
2207 record_value_for_reg (i1dest, i1_insn, i1_val);
2208
2209 regno = REGNO (i1dest);
2210 if (! added_sets_1)
2211 {
2212 reg_n_sets[regno]--;
2213 if (reg_n_sets[regno] == 0
2214 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2215 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2216 reg_n_refs[regno] = 0;
2217 }
2218 }
2219
2220 /* Update reg_nonzero_bits et al for any changes that may have been made
2221 to this insn. */
2222
2223 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2224 if (newi2pat)
2225 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2226
2227 /* If I3 is now an unconditional jump, ensure that it has a
2228 BARRIER following it since it may have initially been a
2229 conditional jump. It may also be the last nonnote insn. */
2230
2231 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2232 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2233 || GET_CODE (temp) != BARRIER))
2234 emit_barrier_after (i3);
2235 }
2236
2237 combine_successes++;
2238
2239 return newi2pat ? i2 : i3;
2240}
2241\f
2242/* Undo all the modifications recorded in undobuf. */
2243
2244static void
2245undo_all ()
2246{
2247 register int i;
2248 if (undobuf.num_undo > MAX_UNDO)
2249 undobuf.num_undo = MAX_UNDO;
2250 for (i = undobuf.num_undo - 1; i >= 0; i--)
2251 {
2252 if (undobuf.undo[i].is_int)
2253 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2254 else
2255 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2257 }
2258
2259 obfree (undobuf.storage);
2260 undobuf.num_undo = 0;
2261}
2262\f
2263/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2264 where we have an arithmetic expression and return that point. LOC will
2265 be inside INSN.
2266
2267 try_combine will call this function to see if an insn can be split into
2268 two insns. */
2269
2270static rtx *
2271find_split_point (loc, insn)
2272 rtx *loc;
2273 rtx insn;
2274{
2275 rtx x = *loc;
2276 enum rtx_code code = GET_CODE (x);
2277 rtx *split;
2278 int len = 0, pos, unsignedp;
2279 rtx inner;
2280
2281 /* First special-case some codes. */
2282 switch (code)
2283 {
2284 case SUBREG:
2285#ifdef INSN_SCHEDULING
2286 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2287 point. */
2288 if (GET_CODE (SUBREG_REG (x)) == MEM)
2289 return loc;
2290#endif
2291 return find_split_point (&SUBREG_REG (x), insn);
2292
2293 case MEM:
2294#ifdef HAVE_lo_sum
2295 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2296 using LO_SUM and HIGH. */
2297 if (GET_CODE (XEXP (x, 0)) == CONST
2298 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2299 {
2300 SUBST (XEXP (x, 0),
2301 gen_rtx_combine (LO_SUM, Pmode,
2302 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2303 XEXP (x, 0)));
2304 return &XEXP (XEXP (x, 0), 0);
2305 }
2306#endif
2307
2308 /* If we have a PLUS whose second operand is a constant and the
2309 address is not valid, perhaps we can split it up using
2310 the machine-specific way to split large constants. We use
2311 the first pseudo-reg (one of the virtual regs) as a placeholder;
2312 it will not remain in the result. */
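/* For instance (hypothetical constants), on a RISC machine
   (mem:SI (plus:SI (reg:SI 70) (const_int 0x12345))) may not be a
   valid address; the machine-specific splitter can then load the high
   part of the constant separately, and the point between the two
   resulting insns becomes our split point. */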
2313 if (GET_CODE (XEXP (x, 0)) == PLUS
2314 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2315 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2316 {
2317 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2318 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2319 subst_insn);
2320
2321 /* This should have produced two insns, each of which sets our
2322 placeholder. If the source of the second is a valid address,
2323 we can put both sources together and make a split point
2324 in the middle. */
2325
2326 if (seq && XVECLEN (seq, 0) == 2
2327 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2328 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2329 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2330 && ! reg_mentioned_p (reg,
2331 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2332 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2333 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2334 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2335 && memory_address_p (GET_MODE (x),
2336 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2337 {
2338 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2339 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2340
2341 /* Replace the placeholder in SRC2 with SRC1. If we can
2342 find where in SRC2 it was placed, that can become our
2343 split point and we can replace this address with SRC2.
2344 Just try two obvious places. */
2345
2346 src2 = replace_rtx (src2, reg, src1);
2347 split = 0;
2348 if (XEXP (src2, 0) == src1)
2349 split = &XEXP (src2, 0);
2350 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2351 && XEXP (XEXP (src2, 0), 0) == src1)
2352 split = &XEXP (XEXP (src2, 0), 0);
2353
2354 if (split)
2355 {
2356 SUBST (XEXP (x, 0), src2);
2357 return split;
2358 }
2359 }
2360
2361 /* If that didn't work, perhaps the first operand is complex and
2362 needs to be computed separately, so make a split point there.
2363 This will occur on machines that just support REG + CONST
2364 and have a constant moved through some previous computation. */
2365
2366 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2367 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2368 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2369 == 'o')))
2370 return &XEXP (XEXP (x, 0), 0);
2371 }
2372 break;
2373
2374 case SET:
2375#ifdef HAVE_cc0
2376 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2377 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2378 we need to put the operand into a register. So split at that
2379 point. */
2380
2381 if (SET_DEST (x) == cc0_rtx
2382 && GET_CODE (SET_SRC (x)) != COMPARE
2383 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2384 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2385 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2386 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2387 return &SET_SRC (x);
2388#endif
2389
2390 /* See if we can split SET_SRC as it stands. */
2391 split = find_split_point (&SET_SRC (x), insn);
2392 if (split && split != &SET_SRC (x))
2393 return split;
2394
2395 /* See if this is a bitfield assignment with everything constant. If
2396 so, this is an IOR of an AND, so split it into that. */
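/* A worked example (hypothetical values): storing the constant 5 into
   a 3-bit field at bit position 2 of QImode register D gives len = 3,
   pos = 2, mask = 7.  Since 5 != mask, the SET_SRC becomes
       (ior:QI (and:QI D (const_int 0xe3)) (const_int 20))
   where 0xe3 is ~(7 << 2) masked to the mode and 20 is 5 << 2.  Had
   the source been 7, every field bit would be set and the AND could
   be omitted. */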
2397 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2398 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2399 <= HOST_BITS_PER_WIDE_INT)
2400 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2401 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2402 && GET_CODE (SET_SRC (x)) == CONST_INT
2403 && ((INTVAL (XEXP (SET_DEST (x), 1))
2404 + INTVAL (XEXP (SET_DEST (x), 2)))
2405 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2406 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2407 {
2408 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2409 int len = INTVAL (XEXP (SET_DEST (x), 1));
2410 int src = INTVAL (SET_SRC (x));
2411 rtx dest = XEXP (SET_DEST (x), 0);
2412 enum machine_mode mode = GET_MODE (dest);
2413 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2414
2415#if BITS_BIG_ENDIAN
2416 pos = GET_MODE_BITSIZE (mode) - len - pos;
2417#endif
2418
2419 if (src == mask)
2420 SUBST (SET_SRC (x),
2421 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2422 else
2423 SUBST (SET_SRC (x),
2424 gen_binary (IOR, mode,
2425 gen_binary (AND, mode, dest,
2426 GEN_INT (~ (mask << pos)
2427 & GET_MODE_MASK (mode))),
2428 GEN_INT (src << pos)));
2429
2430 SUBST (SET_DEST (x), dest);
2431
2432 split = find_split_point (&SET_SRC (x), insn);
2433 if (split && split != &SET_SRC (x))
2434 return split;
2435 }
2436
2437 /* Otherwise, see if this is an operation that we can split into two.
2438 If so, try to split that. */
2439 code = GET_CODE (SET_SRC (x));
2440
2441 switch (code)
2442 {
2443 case AND:
2444 /* If we are AND'ing with a large constant that is only a single
2445 bit and the result is only being used in a context where we
2446 need to know if it is zero or non-zero, replace it with a bit
2447 extraction. This will avoid the large constant, which might
2448 have taken more than one insn to make. If the constant were
2449 not a valid argument to the AND but took only one insn to make,
2450 this is no worse, but if it took more than one insn, it will
2451 be better. */
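/* For example (hypothetical), if the only use of
       (set (reg:SI 60) (and:SI (reg:SI 59) (const_int 32768)))
   is a test of (reg:SI 60) against zero, the AND can become a one-bit
   field extraction, conceptually
       (zero_extract:SI (reg:SI 59) (const_int 1) (const_int 15))
   avoiding the large 0x8000 immediate entirely. */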
2452
2453 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2454 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2455 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2456 && GET_CODE (SET_DEST (x)) == REG
2457 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2458 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2459 && XEXP (*split, 0) == SET_DEST (x)
2460 && XEXP (*split, 1) == const0_rtx)
2461 {
2462 SUBST (SET_SRC (x),
2463 make_extraction (GET_MODE (SET_DEST (x)),
2464 XEXP (SET_SRC (x), 0),
2465 pos, NULL_RTX, 1, 1, 0, 0));
2466 return find_split_point (loc, insn);
2467 }
2468 break;
2469
2470 case SIGN_EXTEND:
2471 inner = XEXP (SET_SRC (x), 0);
2472 pos = 0;
2473 len = GET_MODE_BITSIZE (GET_MODE (inner));
2474 unsignedp = 0;
2475 break;
2476
2477 case SIGN_EXTRACT:
2478 case ZERO_EXTRACT:
2479 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2480 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2481 {
2482 inner = XEXP (SET_SRC (x), 0);
2483 len = INTVAL (XEXP (SET_SRC (x), 1));
2484 pos = INTVAL (XEXP (SET_SRC (x), 2));
2485
2486#if BITS_BIG_ENDIAN
2487 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2488#endif
2489 unsignedp = (code == ZERO_EXTRACT);
2490 }
2491 break;
2492 }
2493
2494 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2495 {
2496 enum machine_mode mode = GET_MODE (SET_SRC (x));
2497
2498 /* For unsigned, we have a choice of a shift followed by an
2499 AND or two shifts. Use two shifts for field sizes where the
2500 constant might be too large. We assume here that we can
2501 always at least get 8-bit constants in an AND insn, which is
2502 true for every current RISC. */
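/* For instance (hypothetical fields, SImode): an unsigned 4-bit field
   at position 8 becomes
       (and:SI (lshiftrt:SI X (const_int 8)) (const_int 15))
   while an unsigned 20-bit field at position 8 becomes
       (lshiftrt:SI (ashift:SI X (const_int 4)) (const_int 12))
   so that no 20-bit mask constant is needed. */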
2503
2504 if (unsignedp && len <= 8)
2505 {
2506 SUBST (SET_SRC (x),
2507 gen_rtx_combine
2508 (AND, mode,
2509 gen_rtx_combine (LSHIFTRT, mode,
2510 gen_lowpart_for_combine (mode, inner),
2511 GEN_INT (pos)),
2512 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2513
2514 split = find_split_point (&SET_SRC (x), insn);
2515 if (split && split != &SET_SRC (x))
2516 return split;
2517 }
2518 else
2519 {
2520 SUBST (SET_SRC (x),
2521 gen_rtx_combine
2522 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2523 gen_rtx_combine (ASHIFT, mode,
2524 gen_lowpart_for_combine (mode, inner),
2525 GEN_INT (GET_MODE_BITSIZE (mode)
2526 - len - pos)),
2527 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2528
2529 split = find_split_point (&SET_SRC (x), insn);
2530 if (split && split != &SET_SRC (x))
2531 return split;
2532 }
2533 }
2534
2535 /* See if this is a simple operation with a constant as the second
2536 operand. It might be that this constant is out of range and hence
2537 could be used as a split point. */
2538 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2539 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2540 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2541 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2542 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2543 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2544 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2545 == 'o'))))
2546 return &XEXP (SET_SRC (x), 1);
2547
2548 /* Finally, see if this is a simple operation with its first operand
2549 not in a register. The operation might require this operand in a
2550 register, so return it as a split point. We can always do this
2551 because if the first operand were another operation, we would have
2552 already found it as a split point. */
2553 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2554 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2555 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2556 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2557 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2558 return &XEXP (SET_SRC (x), 0);
2559
2560 return 0;
2561
2562 case AND:
2563 case IOR:
2564 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2565 it is better to write this as (not (ior A B)) so we can split it.
2566 Similarly for IOR. */
2567 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2568 {
2569 SUBST (*loc,
2570 gen_rtx_combine (NOT, GET_MODE (x),
2571 gen_rtx_combine (code == IOR ? AND : IOR,
2572 GET_MODE (x),
2573 XEXP (XEXP (x, 0), 0),
2574 XEXP (XEXP (x, 1), 0))));
2575 return find_split_point (loc, insn);
2576 }
2577
2578 /* Many RISC machines have a large set of logical insns. If the
2579 second operand is a NOT, put it first so we will try to split the
2580 other operand first. */
2581 if (GET_CODE (XEXP (x, 1)) == NOT)
2582 {
2583 rtx tem = XEXP (x, 0);
2584 SUBST (XEXP (x, 0), XEXP (x, 1));
2585 SUBST (XEXP (x, 1), tem);
2586 }
2587 break;
2588 }
2589
2590 /* Otherwise, select our actions depending on our rtx class. */
2591 switch (GET_RTX_CLASS (code))
2592 {
2593 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2594 case '3':
2595 split = find_split_point (&XEXP (x, 2), insn);
2596 if (split)
2597 return split;
2598 /* ... fall through ... */
2599 case '2':
2600 case 'c':
2601 case '<':
2602 split = find_split_point (&XEXP (x, 1), insn);
2603 if (split)
2604 return split;
2605 /* ... fall through ... */
2606 case '1':
2607 /* Some machines have (and (shift ...) ...) insns. If X is not
2608 an AND, but XEXP (X, 0) is, use it as our split point. */
2609 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2610 return &XEXP (x, 0);
2611
2612 split = find_split_point (&XEXP (x, 0), insn);
2613 if (split)
2614 return split;
2615 return loc;
2616 }
2617
2618 /* Otherwise, we don't have a split point. */
2619 return 0;
2620}
2621\f
2622/* Throughout X, replace FROM with TO, and return the result.
2623 The result is TO if X is FROM;
2624 otherwise the result is X, but its contents may have been modified.
2625 If they were modified, a record was made in undobuf so that
2626 undo_all will (among other things) return X to its original state.
2627
2628 If the number of changes necessary is too large to record for undoing,
2629 the excess changes are not made, so the result is invalid.
2630 The changes already made can still be undone.
2631 undobuf.num_undo is incremented for such changes, so by testing that,
2632 the caller can tell whether the result is valid.
2633
2634 `n_occurrences' is incremented each time FROM is replaced.
2635
2636 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2637
2638 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2639 by copying if `n_occurrences' is non-zero. */
2640
2641static rtx
2642subst (x, from, to, in_dest, unique_copy)
2643 register rtx x, from, to;
2644 int in_dest;
2645 int unique_copy;
2646{
2647 register char *fmt;
2648 register int len, i;
2649 register enum rtx_code code = GET_CODE (x), orig_code = code;
2650 rtx temp;
2651 enum machine_mode mode = GET_MODE (x);
2652 enum machine_mode op0_mode = VOIDmode;
2653 rtx other_insn;
2654 rtx *cc_use;
2655 int n_restarts = 0;
2656
2657/* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2658 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2659 If it is 0, that cannot be done. We can now do this for any MEM
2660 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2661 If not for that, MEM's would very rarely be safe. */
2662
2663/* Reject MODEs bigger than a word, because we might not be able
2664 to reference a two-register group starting with an arbitrary register
2665 (and currently gen_lowpart might crash for a SUBREG). */
2666
2667#define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2668 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2669
2670/* Two expressions are equal if they are identical copies of a shared
2671 RTX or if they are both registers with the same register number
2672 and mode. */
2673
2674#define COMBINE_RTX_EQUAL_P(X,Y) \
2675 ((X) == (Y) \
2676 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2677 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2678
2679 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2680 {
2681 n_occurrences++;
2682 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2683 }
2684
2685 /* If X and FROM are the same register but different modes, they will
2686 not have been seen as equal above. However, flow.c will make a
2687 LOG_LINKS entry for that case. If we do nothing, we will try to
2688 rerecognize our original insn and, when it succeeds, we will
2689 delete the feeding insn, which is incorrect.
2690
2691 So force this insn not to match in this (rare) case. */
2692 if (! in_dest && code == REG && GET_CODE (from) == REG
2693 && REGNO (x) == REGNO (from))
2694 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2695
2696 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2697 of which may contain things that can be combined. */
2698 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2699 return x;
2700
2701 /* It is possible to have a subexpression appear twice in the insn.
2702 Suppose that FROM is a register that appears within TO.
2703 Then, after that subexpression has been scanned once by `subst',
2704 the second time it is scanned, TO may be found. If we were
2705 to scan TO here, we would find FROM within it and create a
2706 self-referential rtl structure, which is completely wrong. */
2707 if (COMBINE_RTX_EQUAL_P (x, to))
2708 return to;
2709
2710 len = GET_RTX_LENGTH (code);
2711 fmt = GET_RTX_FORMAT (code);
2712
2713 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2714 set up to skip this common case. All other cases where we want to
2715 suppress replacing something inside a SET_SRC are handled via the
2716 IN_DEST operand. */
2717 if (code == SET
2718 && (GET_CODE (SET_DEST (x)) == REG
2719 || GET_CODE (SET_DEST (x)) == CC0
2720 || GET_CODE (SET_DEST (x)) == PC))
2721 fmt = "ie";
2722
2723 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2724 if (fmt[0] == 'e')
2725 op0_mode = GET_MODE (XEXP (x, 0));
2726
2727 for (i = 0; i < len; i++)
2728 {
2729 if (fmt[i] == 'E')
2730 {
2731 register int j;
2732 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2733 {
2734 register rtx new;
2735 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2736 {
2737 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2738 n_occurrences++;
2739 }
2740 else
2741 {
2742 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2743
2744 /* If this substitution failed, this whole thing fails. */
2745 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2746 return new;
2747 }
2748
2749 SUBST (XVECEXP (x, i, j), new);
2750 }
2751 }
2752 else if (fmt[i] == 'e')
2753 {
2754 register rtx new;
2755
2756 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2757 {
2758 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2759 n_occurrences++;
2760 }
2761 else
2762 /* If we are in a SET_DEST, suppress most cases unless we
2763 have gone inside a MEM, in which case we want to
2764 simplify the address. We assume here that things that
2765 are actually part of the destination have their inner
2766 parts in the first expression. This is true for SUBREG,
2767 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2768 things aside from REG and MEM that should appear in a
2769 SET_DEST. */
2770 new = subst (XEXP (x, i), from, to,
2771 (((in_dest
2772 && (code == SUBREG || code == STRICT_LOW_PART
2773 || code == ZERO_EXTRACT))
2774 || code == SET)
2775 && i == 0), unique_copy);
2776
2777 /* If we found that we will have to reject this combination,
2778 indicate that by returning the CLOBBER ourselves, rather than
2779 an expression containing it. This will speed things up as
2780 well as prevent accidents where two CLOBBERs are considered
2781 to be equal, thus producing an incorrect simplification. */
2782
2783 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2784 return new;
2785
2786 SUBST (XEXP (x, i), new);
2787 }
2788 }
2789
2790 /* We come back here if we have replaced the expression with one of
2791 a different code and it is likely that further simplification will be
2792 possible. */
2793
2794 restart:
2795
2796 /* If we have restarted more than 4 times, we are probably looping, so
2797 give up. */
2798 if (++n_restarts > 4)
2799 return x;
2800
2801 /* If we are restarting at all, it means that we no longer know the
2802 original mode of operand 0 (since we have probably changed the
2803 form of X). */
2804
2805 if (n_restarts > 1)
2806 op0_mode = VOIDmode;
2807
2808 code = GET_CODE (x);
2809
2810 /* If this is a commutative operation, put a constant last and a complex
2811 expression first. We don't need to do this for comparisons here. */
2812 if (GET_RTX_CLASS (code) == 'c'
2813 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2814 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2815 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2816 || (GET_CODE (XEXP (x, 0)) == SUBREG
2817 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2818 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2819 {
2820 temp = XEXP (x, 0);
2821 SUBST (XEXP (x, 0), XEXP (x, 1));
2822 SUBST (XEXP (x, 1), temp);
2823 }
2824
2825 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2826 sign extension of a PLUS with a constant, reverse the order of the sign
2827 extension and the addition. Note that this is not the same as the original
2828 code, but overflow is undefined for signed values. Also note that the
2829 PLUS will have been partially moved "inside" the sign-extension, so that
2830 the first operand of X will really look like:
2831 (ashiftrt (plus (ashift A C4) C5) C4).
2832 We convert this to
2833 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
2834 and replace the first operand of X with that expression. Later parts
2835 of this function may simplify the expression further.
2836
2837 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2838 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2839 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2840
2841 We do this to simplify address expressions. */
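/* A worked instance of the conversion above (hypothetical constants):
   with C4 == 24 and C5 == C1 << 24, where C1 fits in the low byte,
       (ashiftrt:SI (plus:SI (ashift:SI A (const_int 24))
                             (const_int C5))
                    (const_int 24))
   becomes
       (plus:SI (ashiftrt:SI (ashift:SI A (const_int 24))
                             (const_int 24))
                (const_int C1))
   since the arithmetic right shift by 24 recovers C1 from C5. */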
2842
2843 if ((code == PLUS || code == MINUS || code == MULT)
2844 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2845 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2846 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2847 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2848 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2849 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2850 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2851 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2852 XEXP (XEXP (XEXP (x, 0), 0), 1),
2853 XEXP (XEXP (x, 0), 1))) != 0)
2854 {
2855 rtx new
2856 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2857 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2858 INTVAL (XEXP (XEXP (x, 0), 1)));
2859
2860 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2861 INTVAL (XEXP (XEXP (x, 0), 1)));
2862
2863 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2864 }
2865
2866 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2867 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2868 things. Don't deal with operations that change modes here. */
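/* For example (hypothetical operands):
       (plus:SI (if_then_else:SI (eq A B) C D) (const_int 4))
   is rewritten as
       (if_then_else:SI (eq A B)
                        (plus:SI C (const_int 4))
                        (plus:SI D (const_int 4)))
   in the hope that one or both arms simplify further. */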
2869
2870 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2871 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2872 {
2873 /* Don't do this by using SUBST inside X since we might be messing
2874 up a shared expression. */
2875 rtx cond = XEXP (XEXP (x, 0), 0);
2876 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2877 XEXP (x, 1)),
2878 pc_rtx, pc_rtx, 0, 0);
2879 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2880 XEXP (x, 1)),
2881 pc_rtx, pc_rtx, 0, 0);
2882
2884 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2885 goto restart;
2886 }
2887
2888 else if (GET_RTX_CLASS (code) == '1'
2889 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2890 && GET_MODE (XEXP (x, 0)) == mode)
2891 {
2892 rtx cond = XEXP (XEXP (x, 0), 0);
2893 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2894 pc_rtx, pc_rtx, 0, 0);
2895 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2896 pc_rtx, pc_rtx, 0, 0);
2897
2898 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2899 goto restart;
2900 }
2901
2902 /* Try to fold this expression in case we have constants that weren't
2903 present before. */
2904 temp = 0;
2905 switch (GET_RTX_CLASS (code))
2906 {
2907 case '1':
2908 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2909 break;
2910 case '<':
2911 temp = simplify_relational_operation (code, op0_mode,
2912 XEXP (x, 0), XEXP (x, 1));
2913#ifdef FLOAT_STORE_FLAG_VALUE
2914 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2915 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2916 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2917#endif
2918 break;
2919 case 'c':
2920 case '2':
2921 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2922 break;
2923 case 'b':
2924 case '3':
2925 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2926 XEXP (x, 1), XEXP (x, 2));
2927 break;
2928 }
2929
2930 if (temp)
2931 x = temp, code = GET_CODE (temp);
2932
2933 /* First see if we can apply the inverse distributive law. */
2934 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2935 {
2936 x = apply_distributive_law (x);
2937 code = GET_CODE (x);
2938 }
2939
2940 /* If CODE is an associative operation not otherwise handled, see if we
2941 can associate some operands. This can win if they are constants or
2942 if they are logically related (e.g. (a & b) & a). */
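/* For example (hypothetical constants), in
       (and:SI (and:SI (reg:SI 60) (const_int 12)) (const_int 10))
   the two constants associate and fold to 12 & 10 == 8, giving
       (and:SI (reg:SI 60) (const_int 8)). */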
2943 if ((code == PLUS || code == MINUS
2944 || code == MULT || code == AND || code == IOR || code == XOR
2945 || code == DIV || code == UDIV
2946 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2947 && GET_MODE_CLASS (mode) == MODE_INT)
2948 {
2949 if (GET_CODE (XEXP (x, 0)) == code)
2950 {
2951 rtx other = XEXP (XEXP (x, 0), 0);
2952 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2953 rtx inner_op1 = XEXP (x, 1);
2954 rtx inner;
2955
2956 /* Make sure we pass the constant operand if any as the second
2957 one if this is a commutative operation. */
2958 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2959 {
2960 rtx tem = inner_op0;
2961 inner_op0 = inner_op1;
2962 inner_op1 = tem;
2963 }
2964 inner = simplify_binary_operation (code == MINUS ? PLUS
2965 : code == DIV ? MULT
2966 : code == UDIV ? MULT
2967 : code,
2968 mode, inner_op0, inner_op1);
2969
2970 /* For commutative operations, try the other pair if that one
2971 didn't simplify. */
2972 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2973 {
2974 other = XEXP (XEXP (x, 0), 1);
2975 inner = simplify_binary_operation (code, mode,
2976 XEXP (XEXP (x, 0), 0),
2977 XEXP (x, 1));
2978 }
2979
2980 if (inner)
2981 {
2982 x = gen_binary (code, mode, other, inner);
2983 goto restart;
2985 }
2986 }
2987 }
2988
2989 /* A little bit of algebraic simplification here. */
2990 switch (code)
2991 {
2992 case MEM:
2993 /* Ensure that our address has any ASHIFTs converted to MULT in case
2994 address-recognizing predicates are called later. */
2995 temp = make_compound_operation (XEXP (x, 0), MEM);
2996 SUBST (XEXP (x, 0), temp);
2997 break;
2998
2999 case SUBREG:
3000 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3001 is paradoxical. If we can't do that safely, then it becomes
3002 something nonsensical so that this combination won't take place. */
3003
3004 if (GET_CODE (SUBREG_REG (x)) == MEM
3005 && (GET_MODE_SIZE (mode)
3006 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3007 {
3008 rtx inner = SUBREG_REG (x);
3009 int endian_offset = 0;
3010 /* Don't change the mode of the MEM
3011 if that would change the meaning of the address. */
3012 if (MEM_VOLATILE_P (SUBREG_REG (x))
3013 || mode_dependent_address_p (XEXP (inner, 0)))
3014 return gen_rtx (CLOBBER, mode, const0_rtx);
3015
3016#if BYTES_BIG_ENDIAN
3017 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3018 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3019 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3020 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
3021#endif
3022 /* Note if the plus_constant doesn't make a valid address
3023 then this combination won't be accepted. */
3024 x = gen_rtx (MEM, mode,
3025 plus_constant (XEXP (inner, 0),
3026 (SUBREG_WORD (x) * UNITS_PER_WORD
3027 + endian_offset)));
3028 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3029 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3030 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3031 return x;
3032 }
3033
3034 /* If we are in a SET_DEST, these other cases can't apply. */
3035 if (in_dest)
3036 return x;
3037
3038 /* Changing mode twice with SUBREG => just change it once,
3039 or not at all if changing back to starting mode. */
3040 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3041 {
3042 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3043 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3044 return SUBREG_REG (SUBREG_REG (x));
3045
3046 SUBST_INT (SUBREG_WORD (x),
3047 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3048 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3049 }
3050
3051 /* SUBREG of a hard register => just change the register number
3052 and/or mode. If the hard register is not valid in that mode,
3053 suppress this combination. If the hard register is the stack,
3054 frame, or argument pointer, leave this as a SUBREG. */
3055
3056 if (GET_CODE (SUBREG_REG (x)) == REG
3057 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3058 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3059#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3060 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3061#endif
3062 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3063 {
3064 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3065 mode))
3066 return gen_rtx (REG, mode,
3067 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3068 else
3069 return gen_rtx (CLOBBER, mode, const0_rtx);
3070 }
3071
3072 /* For a constant, try to pick up the part we want. Handle a full
3073 word and low-order part. Only do this if we are narrowing
3074 the constant; if it is being widened, we have no idea what
3075 the extra bits will have been set to. */
3076
3077 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3078 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3079 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3080 && GET_MODE_CLASS (mode) == MODE_INT)
3081 {
3082 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3083 0, op0_mode);
3084 if (temp)
3085 return temp;
3086 }
3087
3088 /* If we want a subreg of a constant, at offset 0,
3089 take the low bits. On a little-endian machine, that's
3090 always valid. On a big-endian machine, it's valid
3091 only if the constant's mode fits in one word. */
3092 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3093 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3094#if WORDS_BIG_ENDIAN
3095 && GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD
3096#endif
3097 )
3098 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3099
3100 /* If we are narrowing the object, we need to see if we can simplify
3101 the expression for the object knowing that we only need the
3102 low-order bits. */
3103
3104 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
3105 && subreg_lowpart_p (x))
3106 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
3107 NULL_RTX);
3108 break;
3109
3110 case NOT:
3111 /* (not (plus X -1)) can become (neg X). */
3112 if (GET_CODE (XEXP (x, 0)) == PLUS
3113 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3114 {
3115 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3116 goto restart;
3117 }
3118
3119 /* Similarly, (not (neg X)) is (plus X -1). */
3120 if (GET_CODE (XEXP (x, 0)) == NEG)
3121 {
3122 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3123 goto restart;
3124 }
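/* Both rewrites follow from the two's complement identity ~Y == -Y - 1. */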
3125
3126 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3127 if (GET_CODE (XEXP (x, 0)) == XOR
3128 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3129 && (temp = simplify_unary_operation (NOT, mode,
3130 XEXP (XEXP (x, 0), 1),
3131 mode)) != 0)
3132 {
3133 SUBST (XEXP (XEXP (x, 0), 1), temp);
3134 return XEXP (x, 0);
3135 }
3136
3137 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3138 other than 1, but that is not valid. We could do a similar
3139 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3140 but this doesn't seem common enough to bother with. */
3141 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3142 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3143 {
3144 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
3145 XEXP (XEXP (x, 0), 1));
3146 goto restart;
3147 }
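/* E.g., in QImode this turns (not (ashift (const_int 1) X)), i.e. ~(1 << X),
 into (rotate (const_int -2) X): the single zero bit travels with X. */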
3148
3149 if (GET_CODE (XEXP (x, 0)) == SUBREG
3150 && subreg_lowpart_p (XEXP (x, 0))
3151 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3152 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3153 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3154 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3155 {
3156 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3157
3158 x = gen_rtx (ROTATE, inner_mode,
3159 gen_unary (NOT, inner_mode, const1_rtx),
3160 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3161 x = gen_lowpart_for_combine (mode, x);
3162 goto restart;
3163 }
3164
3165#if STORE_FLAG_VALUE == -1
3166 /* (not (comparison foo bar)) can be done by reversing the comparison
3167 code if valid. */
3168 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3169 && reversible_comparison_p (XEXP (x, 0)))
3170 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3171 mode, XEXP (XEXP (x, 0), 0),
3172 XEXP (XEXP (x, 0), 1));
3173
3174 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3175 is (lt foo (const_int 0)), so we can perform the above
3176 simplification. */
3177
3178 if (XEXP (x, 1) == const1_rtx
3179 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3180 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3181 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3182 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3183#endif
3184
3185 /* Apply De Morgan's laws to reduce the number of patterns for machines
3186 with negating logical insns (and-not, nand, etc.). If result has
3187 only one NOT, put it first, since that is how the patterns are
3188 coded. */
3189
3190 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3191 {
3192 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3193
3194 if (GET_CODE (in1) == NOT)
3195 in1 = XEXP (in1, 0);
3196 else
3197 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3198
3199 if (GET_CODE (in2) == NOT)
3200 in2 = XEXP (in2, 0);
3201 else if (GET_CODE (in2) == CONST_INT
3202 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3203 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3204 else
3205 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3206
3207 if (GET_CODE (in2) == NOT)
3208 {
3209 rtx tem = in2;
3210 in2 = in1; in1 = tem;
3211 }
3212
3213 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3214 mode, in1, in2);
3215 goto restart;
3216 }
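/* For example, (not (ior A (const_int 12))) in QImode becomes
 (and (not A) (const_int 243)), with the NOT placed first. */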
3217 break;
3218
3219 case NEG:
3220 /* (neg (plus X 1)) can become (not X). */
3221 if (GET_CODE (XEXP (x, 0)) == PLUS
3222 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3223 {
3224 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3225 goto restart;
3226 }
3227
3228 /* Similarly, (neg (not X)) is (plus X 1). */
3229 if (GET_CODE (XEXP (x, 0)) == NOT)
3230 {
3231 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
3232 goto restart;
3233 }
3234
3235 /* (neg (minus X Y)) can become (minus Y X). */
3236 if (GET_CODE (XEXP (x, 0)) == MINUS
3237 && (GET_MODE_CLASS (mode) != MODE_FLOAT
3238 /* x-y != -(y-x) with IEEE floating point. */
3239 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3240 {
3241 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3242 XEXP (XEXP (x, 0), 0));
3243 goto restart;
3244 }
3245
3246 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3247 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3248 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3249 {
3250 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3251 goto restart;
3252 }
3253
3254 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3255 if we can then eliminate the NEG (e.g.,
3256 if the operand is a constant). */
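/* E.g., (neg (ashift (const_int 1) X)) becomes (ashift (const_int -1) X),
 since -(1 << X) == (-1) << X. */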
3257
3258 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3259 {
3260 temp = simplify_unary_operation (NEG, mode,
3261 XEXP (XEXP (x, 0), 0), mode);
3262 if (temp)
3263 {
3264 SUBST (XEXP (XEXP (x, 0), 0), temp);
3265 return XEXP (x, 0);
3266 }
3267 }
3268
3269 temp = expand_compound_operation (XEXP (x, 0));
3270
3271 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3272 replaced by (lshiftrt X C). This will convert
3273 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3274
3275 if (GET_CODE (temp) == ASHIFTRT
3276 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3277 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3278 {
3279 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3280 INTVAL (XEXP (temp, 1)));
3281 goto restart;
3282 }
3283
3284 /* If X has only a single bit that might be nonzero, say, bit I, convert
3285 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3286 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3287 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3288 or a SUBREG of one since we'd be making the expression more
3289 complex if it was just a register. */
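/* E.g., in 32-bit SImode with I == 0 the value is 0 or 1, and
 (ashiftrt (ashift X (const_int 31)) (const_int 31)) yields 0 or -1,
 which is exactly (neg X). */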
3290
3291 if (GET_CODE (temp) != REG
3292 && ! (GET_CODE (temp) == SUBREG
3293 && GET_CODE (SUBREG_REG (temp)) == REG)
3294 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3295 {
3296 rtx temp1 = simplify_shift_const
3297 (NULL_RTX, ASHIFTRT, mode,
3298 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3299 GET_MODE_BITSIZE (mode) - 1 - i),
3300 GET_MODE_BITSIZE (mode) - 1 - i);
3301
3302 /* If all we did was surround TEMP with the two shifts, we
3303 haven't improved anything, so don't use it. Otherwise,
3304 we are better off with TEMP1. */
3305 if (GET_CODE (temp1) != ASHIFTRT
3306 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3307 || XEXP (XEXP (temp1, 0), 0) != temp)
3308 {
3309 x = temp1;
3310 goto restart;
3311 }
3312 }
3313 break;
3314
3315 case FLOAT_TRUNCATE:
3316 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3317 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3318 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3319 return XEXP (XEXP (x, 0), 0);
3320 break;
3321
3322#ifdef HAVE_cc0
3323 case COMPARE:
3324 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3325 using cc0, in which case we want to leave it as a COMPARE
3326 so we can distinguish it from a register-register-copy. */
3327 if (XEXP (x, 1) == const0_rtx)
3328 return XEXP (x, 0);
3329
3330 /* In IEEE floating point, x-0 is not the same as x. */
3331 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3332 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3333 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3334 return XEXP (x, 0);
3335 break;
3336#endif
3337
3338 case CONST:
3339 /* (const (const X)) can become (const X). Do it this way rather than
3340 returning the inner CONST since CONST can be shared with a
3341 REG_EQUAL note. */
3342 if (GET_CODE (XEXP (x, 0)) == CONST)
3343 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3344 break;
3345
3346#ifdef HAVE_lo_sum
3347 case LO_SUM:
3348 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3349 can add in an offset. find_split_point will split this address up
3350 again if it doesn't match. */
3351 if (GET_CODE (XEXP (x, 0)) == HIGH
3352 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3353 return XEXP (x, 1);
3354 break;
3355#endif
3356
3357 case PLUS:
3358 /* If we have (plus (plus A const) B), associate it so that CONST is
3359 outermost, since that is how indexed addresses are
3360 supposed to appear. This code used to check many more cases, but
3361 they are now checked elsewhere. */
3362 if (GET_CODE (XEXP (x, 0)) == PLUS
3363 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3364 return gen_binary (PLUS, mode,
3365 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3366 XEXP (x, 1)),
3367 XEXP (XEXP (x, 0), 1));
3368
3369 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3370 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3371 bit-field and can be replaced by either a sign_extend or a
3372 sign_extract. The `and' may be a zero_extend. */
3373 if (GET_CODE (XEXP (x, 0)) == XOR
3374 && GET_CODE (XEXP (x, 1)) == CONST_INT
3375 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3376 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3377 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3378 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3379 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3380 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3381 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3382 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3383 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3384 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3385 == i + 1))))
3386 {
3387 x = simplify_shift_const
3388 (NULL_RTX, ASHIFTRT, mode,
3389 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3390 XEXP (XEXP (XEXP (x, 0), 0), 0),
3391 GET_MODE_BITSIZE (mode) - (i + 1)),
3392 GET_MODE_BITSIZE (mode) - (i + 1));
3393 goto restart;
3394 }
3395
3396 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3397 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3398 the bitsize of the mode - 1. This allows simplification of
3399 "a = (b & 8) == 0;" */
3400 if (XEXP (x, 1) == constm1_rtx
3401 && GET_CODE (XEXP (x, 0)) != REG
3402 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3403 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3404 && nonzero_bits (XEXP (x, 0), mode) == 1)
3405 {
3406 x = simplify_shift_const
3407 (NULL_RTX, ASHIFTRT, mode,
3408 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3409 gen_rtx_combine (XOR, mode,
3410 XEXP (x, 0), const1_rtx),
3411 GET_MODE_BITSIZE (mode) - 1),
3412 GET_MODE_BITSIZE (mode) - 1);
3413 goto restart;
3414 }
3415
3416 /* If we are adding two things that have no bits in common, convert
3417 the addition into an IOR. This will often be further simplified,
3418 for example in cases like ((a & 1) + (a & 2)), which can
3419 become a & 3. */
3420
3421 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3422 && (nonzero_bits (XEXP (x, 0), mode)
3423 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3424 {
3425 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3426 goto restart;
3427 }
3428 break;
3429
3430 case MINUS:
3431 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3432 (and <foo> (const_int pow2-1)) */
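/* For example, (minus A (and A (const_int -8))) becomes
 (and A (const_int 7)), the low three bits of A. */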
3433 if (GET_CODE (XEXP (x, 1)) == AND
3434 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3435 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3436 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3437 {
3438 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3439 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3440 goto restart;
3441 }
3442 break;
3443
3444 case MULT:
3445 /* If we have (mult (plus A B) C), apply the distributive law and then
3446 the inverse distributive law to see if things simplify. This
3447 occurs mostly in addresses, often when unrolling loops. */
3448
3449 if (GET_CODE (XEXP (x, 0)) == PLUS)
3450 {
3451 x = apply_distributive_law
3452 (gen_binary (PLUS, mode,
3453 gen_binary (MULT, mode,
3454 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3455 gen_binary (MULT, mode,
3456 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3457
3458 if (GET_CODE (x) != MULT)
3459 goto restart;
3460 }
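/* E.g., (mult (plus A (const_int 4)) (const_int 8)) becomes
 (plus (mult A (const_int 8)) (const_int 32)), the shape address
 expressions are expected to have. */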
3461
3462 /* If this is multiplication by a power of two and its first operand is
3463 a shift, treat the multiply as a shift to allow the shifts to
3464 possibly combine. */
3465 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3466 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3467 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3468 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3469 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3470 || GET_CODE (XEXP (x, 0)) == ROTATE
3471 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3472 {
3473 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3474 goto restart;
3475 }
3476
3477 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3478 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3479 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3480 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3481 XEXP (XEXP (x, 0), 1));
3482 break;
3483
3484 case UDIV:
3485 /* If this is a divide by a power of two, treat it as a shift if
3486 its first operand is a shift. */
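/* E.g., (udiv (lshiftrt A (const_int 2)) (const_int 4)) merges into
 (lshiftrt A (const_int 4)). */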
3487 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3488 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3489 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3490 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3491 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3492 || GET_CODE (XEXP (x, 0)) == ROTATE
3493 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3494 {
3495 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3496 goto restart;
3497 }
3498 break;
3499
3500 case EQ: case NE:
3501 case GT: case GTU: case GE: case GEU:
3502 case LT: case LTU: case LE: case LEU:
3503 /* If the first operand is a condition code, we can't do anything
3504 with it. */
3505 if (GET_CODE (XEXP (x, 0)) == COMPARE
3506 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3507#ifdef HAVE_cc0
3508 && XEXP (x, 0) != cc0_rtx
3509#endif
3510 ))
3511 {
3512 rtx op0 = XEXP (x, 0);
3513 rtx op1 = XEXP (x, 1);
3514 enum rtx_code new_code;
3515
3516 if (GET_CODE (op0) == COMPARE)
3517 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3518
3519 /* Simplify our comparison, if possible. */
3520 new_code = simplify_comparison (code, &op0, &op1);
3521
3522#if STORE_FLAG_VALUE == 1
3523 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3524 if only the low-order bit is possibly nonzero in X (such as when
3525 X is a ZERO_EXTRACT of one bit). Similarly, we can convert
3526 EQ to (xor X 1). Remove any ZERO_EXTRACT we made when thinking
3527 this was a comparison. It may now be simpler to use, e.g., an
3528 AND. If a ZERO_EXTRACT is indeed appropriate, it will
3529 be placed back by the call to make_compound_operation in the
3530 SET case. */
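/* E.g., for (ne (zero_extract X (const_int 1) Y) (const_int 0)) the test
 against zero is simply dropped, since the extracted bit is already 0 or 1. */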
3531 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3532 && op1 == const0_rtx
3533 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3534 return gen_lowpart_for_combine (mode,
3535 expand_compound_operation (op0));
3536 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3537 && op1 == const0_rtx
3538 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3539 {
3540 op0 = expand_compound_operation (op0);
3541
3542 x = gen_rtx_combine (XOR, mode,
3543 gen_lowpart_for_combine (mode, op0),
3544 const1_rtx);
3545 goto restart;
3546 }
3547#endif
3548
3549#if STORE_FLAG_VALUE == -1
3550 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3551 to (neg x) if only the low-order bit of X can be nonzero.
3552 This converts (ne (zero_extract X 1 Y) 0) to
3553 (sign_extract X 1 Y). */
3554 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3555 && op1 == const0_rtx
3556 && nonzero_bits (op0, GET_MODE (op0)) == 1)
3557 {
3558 op0 = expand_compound_operation (op0);
3559 x = gen_rtx_combine (NEG, mode,
3560 gen_lowpart_for_combine (mode, op0));
3561 goto restart;
3562 }
3563#endif
3564
3565 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3566 one bit that might be nonzero, we can convert (ne x 0) to
3567 (ashift x c) where C puts the bit in the sign bit. Remove any
3568 AND with STORE_FLAG_VALUE when we are done, since we are only
3569 going to test the sign bit. */
3570 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3571 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3572 && (STORE_FLAG_VALUE
3573 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3574 && op1 == const0_rtx
3575 && mode == GET_MODE (op0)
3576 && (i = exact_log2 (nonzero_bits (op0, GET_MODE (op0)))) >= 0)
3577 {
3578 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3579 expand_compound_operation (op0),
3580 GET_MODE_BITSIZE (mode) - 1 - i);
3581 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3582 return XEXP (x, 0);
3583 else
3584 return x;
3585 }
3586
3587 /* If the code changed, return a whole new comparison. */
3588 if (new_code != code)
3589 return gen_rtx_combine (new_code, mode, op0, op1);
3590
3591 /* Otherwise, keep this operation, but maybe change its operands.
3592 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3593 SUBST (XEXP (x, 0), op0);
3594 SUBST (XEXP (x, 1), op1);
3595 }
3596 break;
3597
3598 case IF_THEN_ELSE:
3599 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3600 used in it is being compared against certain values. Get the
3601 true and false comparisons and see if that says anything about the
3602 value of each arm. */
3603
3604 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3605 && reversible_comparison_p (XEXP (x, 0))
3606 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3607 {
3608 HOST_WIDE_INT nzb;
3609 rtx from = XEXP (XEXP (x, 0), 0);
3610 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3611 enum rtx_code false_code = reverse_condition (true_code);
3612 rtx true_val = XEXP (XEXP (x, 0), 1);
3613 rtx false_val = true_val;
3614 rtx true_arm = XEXP (x, 1);
3615 rtx false_arm = XEXP (x, 2);
3616 int swapped = 0;
3617
3618 /* If FALSE_CODE is EQ, swap the codes and arms. */
3619
3620 if (false_code == EQ)
3621 {
3622 swapped = 1, true_code = EQ, false_code = NE;
3623 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3624 }
3625
3626 /* If we are comparing against zero and the expression being tested
3627 has only a single bit that might be nonzero, that is its value
3628 when it is not equal to zero. Similarly if it is known to be
3629 -1 or 0. */
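/* E.g., if only bit 3 of FROM can be nonzero and the test is (eq FROM 0),
 then FROM must be 8 in the arm where the test fails. */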
3630
3631 if (true_code == EQ && true_val == const0_rtx
3632 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3633 false_code = EQ, false_val = GEN_INT (nzb);
3634 else if (true_code == EQ && true_val == const0_rtx
3635 && (num_sign_bit_copies (from, GET_MODE (from))
3636 == GET_MODE_BITSIZE (GET_MODE (from))))
3637 false_code = EQ, false_val = constm1_rtx;
3638
3639 /* Now simplify an arm if we know the value of the register
3640 in the branch and it is used in the arm. Be careful due to
3641 the potential of locally-shared RTL. */
3642
3643 if (reg_mentioned_p (from, true_arm))
3644 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3645 from, true_val),
3646 pc_rtx, pc_rtx, 0, 0);
3647 if (reg_mentioned_p (from, false_arm))
3648 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3649 from, false_val),
3650 pc_rtx, pc_rtx, 0, 0);
3651
3652 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3653 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3654 }
3655
3656 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3657 reversed, do so to avoid needing two sets of patterns for
3658 subtract-and-branch insns. Similarly if we have a constant in that
3659 position or if the third operand is the same as the first operand
3660 of the comparison. */
3661
3662 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3663 && reversible_comparison_p (XEXP (x, 0))
3664 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3665 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3666 {
3667 SUBST (XEXP (x, 0),
3668 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3669 GET_MODE (XEXP (x, 0)),
3670 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3671
3672 temp = XEXP (x, 1);
3673 SUBST (XEXP (x, 1), XEXP (x, 2));
3674 SUBST (XEXP (x, 2), temp);
3675 }
3676
3677 /* If the two arms are identical, we don't need the comparison. */
3678
3679 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3680 && ! side_effects_p (XEXP (x, 0)))
3681 return XEXP (x, 1);
3682
3683 /* Look for cases where we have (abs x) or (neg (abs X)). */
3684
3685 if (GET_MODE_CLASS (mode) == MODE_INT
3686 && GET_CODE (XEXP (x, 2)) == NEG
3687 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3688 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3689 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3690 && ! side_effects_p (XEXP (x, 1)))
3691 switch (GET_CODE (XEXP (x, 0)))
3692 {
3693 case GT:
3694 case GE:
3695 x = gen_unary (ABS, mode, XEXP (x, 1));
3696 goto restart;
3697 case LT:
3698 case LE:
3699 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3700 goto restart;
3701 }
3702
3703 /* Look for MIN or MAX. */
3704
3705 if (GET_MODE_CLASS (mode) == MODE_INT
3706 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3707 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3708 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3709 && ! side_effects_p (XEXP (x, 0)))
3710 switch (GET_CODE (XEXP (x, 0)))
3711 {
3712 case GE:
3713 case GT:
3714 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3715 goto restart;
3716 case LE:
3717 case LT:
3718 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3719 goto restart;
3720 case GEU:
3721 case GTU:
3722 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3723 goto restart;
3724 case LEU:
3725 case LTU:
3726 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3727 goto restart;
3728 }
3729
3730 /* If we have something like (if_then_else (ne A 0) (OP X C) X),
3731 A is known to be either 0 or 1, and OP is an identity when its
3732 second operand is zero, this can be done as (OP X (mult A C)).
3733 Similarly if A is known to be 0 or -1 and also similarly if we have
3734 a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
3735 we don't destroy it). */
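/* E.g., with A known to be 0 or 1,
 (if_then_else (ne A (const_int 0)) (plus X C) X) becomes
 (plus X (mult A C)). */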
3736
3737 if (mode != VOIDmode
3738 && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3739 && XEXP (XEXP (x, 0), 1) == const0_rtx
3740 && (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3741 || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
3742 == GET_MODE_BITSIZE (mode))))
3743 {
3744 rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
3745 ? XEXP (x, 1) : XEXP (x, 2));
3746 rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
3747 rtx dir = (nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3748 ? const1_rtx : constm1_rtx);
3749 rtx c = 0;
3750 enum machine_mode m = mode;
3751 enum rtx_code op, extend_op = 0;
3752
3753 if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
3754 || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
3755 || GET_CODE (nz) == ASHIFT
3756 || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
3757 && rtx_equal_p (XEXP (nz, 0), z))
3758 c = XEXP (nz, 1), op = GET_CODE (nz);
3759 else if (GET_CODE (nz) == SIGN_EXTEND
3760 && (GET_CODE (XEXP (nz, 0)) == PLUS
3761 || GET_CODE (XEXP (nz, 0)) == MINUS
3762 || GET_CODE (XEXP (nz, 0)) == IOR
3763 || GET_CODE (XEXP (nz, 0)) == XOR
3764 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3765 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3766 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3767 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3768 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3769 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3770 && (num_sign_bit_copies (z, GET_MODE (z))
3771 >= (GET_MODE_BITSIZE (mode)
3772 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
3773 {
3774 c = XEXP (XEXP (nz, 0), 1);
3775 op = GET_CODE (XEXP (nz, 0));
3776 extend_op = SIGN_EXTEND;
3777 m = GET_MODE (XEXP (nz, 0));
3778 }
3779 else if (GET_CODE (nz) == ZERO_EXTEND
3780 && (GET_CODE (XEXP (nz, 0)) == PLUS
3781 || GET_CODE (XEXP (nz, 0)) == MINUS
3782 || GET_CODE (XEXP (nz, 0)) == IOR
3783 || GET_CODE (XEXP (nz, 0)) == XOR
3784 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3785 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3786 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3787 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3788 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3789 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3790 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3791 && ((nonzero_bits (z, GET_MODE (z))
3792 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
3793 == 0))
3794 {
3795 c = XEXP (XEXP (nz, 0), 1);
3796 op = GET_CODE (XEXP (nz, 0));
3797 extend_op = ZERO_EXTEND;
3798 m = GET_MODE (XEXP (nz, 0));
3799 }
3800
3801 if (c && ! side_effects_p (c) && ! side_effects_p (z))
3802 {
3803 temp
3804 = gen_binary (MULT, m,
3805 gen_lowpart_for_combine (m,
3806 XEXP (XEXP (x, 0), 0)),
3807 gen_binary (MULT, m, c, dir));
3808
3809 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3810
3811 if (extend_op != 0)
3812 temp = gen_unary (extend_op, mode, temp);
3813
3814 return temp;
3815 }
3816 }
3817 break;
3818
3819 case ZERO_EXTRACT:
3820 case SIGN_EXTRACT:
3821 case ZERO_EXTEND:
3822 case SIGN_EXTEND:
3823 /* If we are processing SET_DEST, we are done. */
3824 if (in_dest)
3825 return x;
3826
3827 x = expand_compound_operation (x);
3828 if (GET_CODE (x) != code)
3829 goto restart;
3830 break;
3831
3832 case SET:
3833 /* (set (pc) (return)) gets written as (return). */
3834 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3835 return SET_SRC (x);
3836
3837 /* Convert this into a field assignment operation, if possible. */
3838 x = make_field_assignment (x);
3839
3840 /* If we are setting CC0 or if the source is a COMPARE, look for the
3841 use of the comparison result and try to simplify it unless we already
3842 have used undobuf.other_insn. */
3843 if ((GET_CODE (SET_SRC (x)) == COMPARE
3844#ifdef HAVE_cc0
3845 || SET_DEST (x) == cc0_rtx
3846#endif
3847 )
3848 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3849 &other_insn)) != 0
3850 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3851 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3852 && XEXP (*cc_use, 0) == SET_DEST (x))
3853 {
3854 enum rtx_code old_code = GET_CODE (*cc_use);
3855 enum rtx_code new_code;
3856 rtx op0, op1;
3857 int other_changed = 0;
3858 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3859
3860 if (GET_CODE (SET_SRC (x)) == COMPARE)
3861 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3862 else
3863 op0 = SET_SRC (x), op1 = const0_rtx;
3864
3865 /* Simplify our comparison, if possible. */
3866 new_code = simplify_comparison (old_code, &op0, &op1);
3867
3868#ifdef EXTRA_CC_MODES
3869 /* If this machine has CC modes other than CCmode, check to see
3870 if we need to use a different CC mode here. */
3871 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3872#endif /* EXTRA_CC_MODES */
3873
3874#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3875 /* If the mode changed, we have to change SET_DEST, the mode
3876 in the compare, and the mode in the place SET_DEST is used.
3877 If SET_DEST is a hard register, just build new versions with
3878 the proper mode. If it is a pseudo, we lose unless it is the only
3879 time we set the pseudo, in which case we can safely change
3880 its mode. */
3881 if (compare_mode != GET_MODE (SET_DEST (x)))
3882 {
3883 int regno = REGNO (SET_DEST (x));
3884 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3885
3886 if (regno < FIRST_PSEUDO_REGISTER
3887 || (reg_n_sets[regno] == 1
3888 && ! REG_USERVAR_P (SET_DEST (x))))
3889 {
3890 if (regno >= FIRST_PSEUDO_REGISTER)
3891 SUBST (regno_reg_rtx[regno], new_dest);
3892
3893 SUBST (SET_DEST (x), new_dest);
3894 SUBST (XEXP (*cc_use, 0), new_dest);
3895 other_changed = 1;
3896 }
3897 }
3898#endif
3899
3900 /* If the code changed, we have to build a new comparison
3901 in undobuf.other_insn. */
3902 if (new_code != old_code)
3903 {
3904 unsigned HOST_WIDE_INT mask;
3905
3906 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3907 SET_DEST (x), const0_rtx));
3908
3909 /* If the only change we made was to change an EQ into an
3910 NE or vice versa, OP0 has only one bit that might be nonzero,
3911 and OP1 is zero, check if changing the user of the condition
3912 code will produce a valid insn. If it won't, we can keep
3913 the original code in that insn by surrounding our operation
3914 with an XOR. */
3915
3916 if (((old_code == NE && new_code == EQ)
3917 || (old_code == EQ && new_code == NE))
3918 && ! other_changed && op1 == const0_rtx
3919 && (GET_MODE_BITSIZE (GET_MODE (op0))
3920 <= HOST_BITS_PER_WIDE_INT)
3921 && (exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0)))
3922 >= 0))
3923 {
3924 rtx pat = PATTERN (other_insn), note = 0;
3925
3926 if ((recog_for_combine (&pat, other_insn, &note) < 0
3927 && ! check_asm_operands (pat)))
3928 {
3929 PUT_CODE (*cc_use, old_code);
3930 other_insn = 0;
3931
3932 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3933 GEN_INT (mask));
3934 }
3935 }
3936
3937 other_changed = 1;
3938 }
3939
3940 if (other_changed)
3941 undobuf.other_insn = other_insn;
3942
3943#ifdef HAVE_cc0
3944 /* If we are now comparing against zero, change our source if
3945 needed. If we do not use cc0, we always have a COMPARE. */
3946 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3947 SUBST (SET_SRC (x), op0);
3948 else
3949#endif
3950
3951 /* Otherwise, if we didn't previously have a COMPARE in the
3952 correct mode, we need one. */
3953 if (GET_CODE (SET_SRC (x)) != COMPARE
3954 || GET_MODE (SET_SRC (x)) != compare_mode)
3955 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3956 op0, op1));
3957 else
3958 {
3959 /* Otherwise, update the COMPARE if needed. */
3960 SUBST (XEXP (SET_SRC (x), 0), op0);
3961 SUBST (XEXP (SET_SRC (x), 1), op1);
3962 }
3963 }
3964 else
3965 {
3966 /* Get SET_SRC in a form where we have placed back any
3967 compound expressions. Then do the checks below. */
3968 temp = make_compound_operation (SET_SRC (x), SET);
3969 SUBST (SET_SRC (x), temp);
3970 }
3971
3972 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3973 operation, and X being a REG or (subreg (reg)), we may be able to
3974 convert this to (set (subreg:m2 x) (op)).
3975
3976 We can always do this if M1 is narrower than M2 because that
3977 means that we only care about the low bits of the result.
3978
3979 However, on most machines (those with neither BYTE_LOADS_ZERO_EXTEND
3980 nor BYTE_LOADS_SIGN_EXTEND defined), we cannot perform a
3981 narrower operation than requested since the high-order bits will
3982 be undefined. On machines where BYTE_LOADS_*_EXTEND is defined,
3983 however, this transformation is safe as long as M1 and M2 have
3984 the same number of words. */
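/* E.g., (set D:QI (subreg:QI (plus:SI A B) 0)) can become
 (set (subreg:SI D 0) (plus:SI A B)) when D is a register. */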
3985
3986 if (GET_CODE (SET_SRC (x)) == SUBREG
3987 && subreg_lowpart_p (SET_SRC (x))
3988 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3989 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3990 / UNITS_PER_WORD)
3991 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3992 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3993#ifndef BYTE_LOADS_EXTEND
3994 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3995 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3996#endif
3997 && (GET_CODE (SET_DEST (x)) == REG
3998 || (GET_CODE (SET_DEST (x)) == SUBREG
3999 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
4000 {
4001 SUBST (SET_DEST (x),
4002 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
4003 SET_DEST (x)));
4004 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
4005 }
4006
4007#ifdef BYTE_LOADS_EXTEND
4008 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
4009 M wider than N, this would require a paradoxical subreg.
4010 Replace the subreg with a zero_extend to avoid the reload that
4011 would otherwise be required. */
4012
4013 if (GET_CODE (SET_SRC (x)) == SUBREG
4014 && subreg_lowpart_p (SET_SRC (x))
4015 && SUBREG_WORD (SET_SRC (x)) == 0
4016 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
4017 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
4018 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
4019 SUBST (SET_SRC (x), gen_rtx_combine (LOAD_EXTEND,
4020 GET_MODE (SET_SRC (x)),
4021 XEXP (SET_SRC (x), 0)));
4022#endif
4023
4024#ifndef HAVE_conditional_move
4025
4026 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
4027 and we are comparing an item known to be 0 or -1 against 0, use a
4028 logical operation instead. Check for one of the arms being an IOR
4029 of the other arm with some value. We compute three terms to be
4030 IOR'ed together. In practice, at most two will be nonzero. Then
4031 we do the IOR's. */
4032
4033 if (GET_CODE (SET_DEST (x)) != PC
4034 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
4035 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
4036 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
4037 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
4038 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
4039 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
4040 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
4041 && ! side_effects_p (SET_SRC (x)))
4042 {
4043 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4044 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
4045 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4046 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
4047 rtx term1 = const0_rtx, term2, term3;
4048
4049 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4050 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4051 else if (GET_CODE (true) == IOR
4052 && rtx_equal_p (XEXP (true, 1), false))
4053 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4054 else if (GET_CODE (false) == IOR
4055 && rtx_equal_p (XEXP (false, 0), true))
4056 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4057 else if (GET_CODE (false) == IOR
4058 && rtx_equal_p (XEXP (false, 1), true))
4059 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4060
4061 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4062 XEXP (XEXP (SET_SRC (x), 0), 0), true);
4063 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4064 gen_unary (NOT, GET_MODE (SET_SRC (x)),
4065 XEXP (XEXP (SET_SRC (x), 0), 0)),
4066 false);
4067
4068 SUBST (SET_SRC (x),
4069 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4070 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4071 term1, term2),
4072 term3));
4073 }
4074#endif
4075 break;
4076
4077 case AND:
4078 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4079 {
4080 x = simplify_and_const_int (x, mode, XEXP (x, 0),
4081 INTVAL (XEXP (x, 1)));
4082
4083 /* If we have (ior (and X C1) C2) and the next restart would be
4084 the last, simplify this by making C1 as small as possible
4085 and then exit. */
4086 if (n_restarts >= 3 && GET_CODE (x) == IOR
4087 && GET_CODE (XEXP (x, 0)) == AND
4088 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4089 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4090 {
4091 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
4092 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
4093 & ~ INTVAL (XEXP (x, 1))));
4094 return gen_binary (IOR, mode, temp, XEXP (x, 1));
4095 }
4096
4097 if (GET_CODE (x) != AND)
4098 goto restart;
4099 }
4100
4101 /* Convert (A | B) & A to A. */
4102 if (GET_CODE (XEXP (x, 0)) == IOR
4103 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4104 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4105 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4106 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4107 return XEXP (x, 1);
4108
4109 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4110 insn (and may simplify more). */
4111 else if (GET_CODE (XEXP (x, 0)) == XOR
4112 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4113 && ! side_effects_p (XEXP (x, 1)))
4114 {
4115 x = gen_binary (AND, mode,
4116 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4117 XEXP (x, 1));
4118 goto restart;
4119 }
4120 else if (GET_CODE (XEXP (x, 0)) == XOR
4121 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4122 && ! side_effects_p (XEXP (x, 1)))
4123 {
4124 x = gen_binary (AND, mode,
4125 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4126 XEXP (x, 1));
4127 goto restart;
4128 }
4129
4130 /* Similarly for (~ (A ^ B)) & A. */
4131 else if (GET_CODE (XEXP (x, 0)) == NOT
4132 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4133 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
4134 && ! side_effects_p (XEXP (x, 1)))
4135 {
4136 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
4137 XEXP (x, 1));
4138 goto restart;
4139 }
4140 else if (GET_CODE (XEXP (x, 0)) == NOT
4141 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4142 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
4143 && ! side_effects_p (XEXP (x, 1)))
4144 {
4145 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
4146 XEXP (x, 1));
4147 goto restart;
4148 }
4149
4150 /* If we have (and A B) with A not an object but that is known to
4151 be -1 or 0, this is equivalent to the expression
4152 (if_then_else (ne A (const_int 0)) B (const_int 0))
4153 We make this conversion because it may allow further
4154 simplifications and then allow use of conditional move insns.
4155 If the machine doesn't have condition moves, code in case SET
4156 will convert the IF_THEN_ELSE back to the logical operation.
4157 We build the IF_THEN_ELSE here in case further simplification
4158 is possible (e.g., we can convert it to ABS). */
4159
4160 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
4161 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4162 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
4163 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4164 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4165 {
4166 rtx op0 = XEXP (x, 0);
4167 rtx op1 = const0_rtx;
4168 enum rtx_code comp_code
4169 = simplify_comparison (NE, &op0, &op1);
4170
4171 x = gen_rtx_combine (IF_THEN_ELSE, mode,
4172 gen_binary (comp_code, VOIDmode, op0, op1),
4173 XEXP (x, 1), const0_rtx);
4174 goto restart;
4175 }
4176
4177 /* In the following group of tests (and those in case IOR below),
4178 we start with some combination of logical operations and apply
4179 the distributive law followed by the inverse distributive law.
4180 Most of the time, this results in no change. However, if some of
4181 the operands are the same or inverses of each other, simplifications
4182 will result.
4183
4184 For example, (and (ior A B) (not B)) can occur as the result of
4185 expanding a bit field assignment. When we apply the distributive
4186 law to this, we get (ior (and A (not B)) (and B (not B))),
4187 which then simplifies to (and A (not B)). */
4188
4189 /* If we have (and (ior A B) C), apply the distributive law and then
4190 the inverse distributive law to see if things simplify. */
4191
4192 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4193 {
4194 x = apply_distributive_law
4195 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4196 gen_binary (AND, mode,
4197 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4198 gen_binary (AND, mode,
4199 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4200 if (GET_CODE (x) != AND)
4201 goto restart;
4202 }
4203
4204 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4205 {
4206 x = apply_distributive_law
4207 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4208 gen_binary (AND, mode,
4209 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4210 gen_binary (AND, mode,
4211 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4212 if (GET_CODE (x) != AND)
4213 goto restart;
4214 }
4215
4216 /* Similarly, taking advantage of the fact that
4217 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4218
4219 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4220 {
4221 x = apply_distributive_law
4222 (gen_binary (XOR, mode,
4223 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4224 XEXP (XEXP (x, 1), 0)),
4225 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4226 XEXP (XEXP (x, 1), 1))));
4227 if (GET_CODE (x) != AND)
4228 goto restart;
4229 }
4230
4231 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4232 {
4233 x = apply_distributive_law
4234 (gen_binary (XOR, mode,
4235 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4236 XEXP (XEXP (x, 0), 0)),
4237 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4238 XEXP (XEXP (x, 0), 1))));
4239 if (GET_CODE (x) != AND)
4240 goto restart;
4241 }
4242 break;
4243
4244 case IOR:
4245 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
4246 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4247 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4248 && (nonzero_bits (XEXP (x, 0), mode) & ~ INTVAL (XEXP (x, 1))) == 0)
4249 return XEXP (x, 1);
4250
4251 /* Convert (A & B) | A to A. */
4252 if (GET_CODE (XEXP (x, 0)) == AND
4253 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4254 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4255 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4256 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4257 return XEXP (x, 1);
4258
4259 /* If we have (ior (and A B) C), apply the distributive law and then
4260 the inverse distributive law to see if things simplify. */
4261
4262 if (GET_CODE (XEXP (x, 0)) == AND)
4263 {
4264 x = apply_distributive_law
4265 (gen_binary (AND, mode,
4266 gen_binary (IOR, mode,
4267 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4268 gen_binary (IOR, mode,
4269 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4270
4271 if (GET_CODE (x) != IOR)
4272 goto restart;
4273 }
4274
4275 if (GET_CODE (XEXP (x, 1)) == AND)
4276 {
4277 x = apply_distributive_law
4278 (gen_binary (AND, mode,
4279 gen_binary (IOR, mode,
4280 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4281 gen_binary (IOR, mode,
4282 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4283
4284 if (GET_CODE (x) != IOR)
4285 goto restart;
4286 }
4287
4288 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4289 mode size to (rotate A CX). */
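/* E.g., in SImode, (ior (ashift A (const_int 24)) (lshiftrt A (const_int 8)))
 is (rotate A (const_int 24)). */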
4290
4291 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4292 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4293 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4294 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4295 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4296 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4297 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4298 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4299 == GET_MODE_BITSIZE (mode)))
4300 {
4301 rtx shift_count;
4302
4303 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4304 shift_count = XEXP (XEXP (x, 0), 1);
4305 else
4306 shift_count = XEXP (XEXP (x, 1), 1);
4307 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4308 goto restart;
4309 }
4310 break;
4311
4312 case XOR:
4313 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4314 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4315 (NOT y). */
4316 {
4317 int num_negated = 0;
4318 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4319
4320 if (GET_CODE (in1) == NOT)
4321 num_negated++, in1 = XEXP (in1, 0);
4322 if (GET_CODE (in2) == NOT)
4323 num_negated++, in2 = XEXP (in2, 0);
4324
4325 if (num_negated == 2)
4326 {
4327 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4328 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4329 }
4330 else if (num_negated == 1)
4331 {
4332 x = gen_unary (NOT, mode,
4333 gen_binary (XOR, mode, in1, in2));
4334 goto restart;
4335 }
4336 }
4337
4338 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4339 correspond to a machine insn or result in further simplifications
4340 if B is a constant. */
4341
4342 if (GET_CODE (XEXP (x, 0)) == AND
4343 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4344 && ! side_effects_p (XEXP (x, 1)))
4345 {
4346 x = gen_binary (AND, mode,
4347 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4348 XEXP (x, 1));
4349 goto restart;
4350 }
4351 else if (GET_CODE (XEXP (x, 0)) == AND
4352 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4353 && ! side_effects_p (XEXP (x, 1)))
4354 {
4355 x = gen_binary (AND, mode,
4356 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4357 XEXP (x, 1));
4358 goto restart;
4359 }
4360
4361
4362#if STORE_FLAG_VALUE == 1
4363 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4364 comparison. */
4365 if (XEXP (x, 1) == const1_rtx
4366 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4367 && reversible_comparison_p (XEXP (x, 0)))
4368 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4369 mode, XEXP (XEXP (x, 0), 0),
4370 XEXP (XEXP (x, 0), 1));
4371
4372 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4373 is (lt foo (const_int 0)), so we can perform the above
4374 simplification. */
4375
4376 if (XEXP (x, 1) == const1_rtx
4377 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4378 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4379 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4380 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
4381#endif
4382
4383 /* (xor (comparison foo bar) (const_int sign-bit))
4384 when STORE_FLAG_VALUE is the sign bit. */
4385 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4386 && (STORE_FLAG_VALUE
4387 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4388 && XEXP (x, 1) == const_true_rtx
4389 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4390 && reversible_comparison_p (XEXP (x, 0)))
4391 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4392 mode, XEXP (XEXP (x, 0), 0),
4393 XEXP (XEXP (x, 0), 1));
4394 break;
4395
4396 case ABS:
4397 /* (abs (neg <foo>)) -> (abs <foo>) */
4398 if (GET_CODE (XEXP (x, 0)) == NEG)
4399 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4400
4401 /* If operand is something known to be positive, ignore the ABS. */
4402 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4403 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4404 <= HOST_BITS_PER_WIDE_INT)
4405 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4406 & ((HOST_WIDE_INT) 1
4407 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4408 == 0)))
4409 return XEXP (x, 0);
4410
4411
4412 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4413 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4414 {
4415 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4416 goto restart;
4417 }
4418 break;
4419
4420 case FFS:
4421 /* (ffs (*_extend <X>)) = (ffs <X>) */
4422 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4423 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4424 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4425 break;
4426
4427 case FLOAT:
4428 /* (float (sign_extend <X>)) = (float <X>). */
4429 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4430 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4431 break;
4432
4433 case LSHIFT:
4434 case ASHIFT:
4435 case LSHIFTRT:
4436 case ASHIFTRT:
4437 case ROTATE:
4438 case ROTATERT:
4439 /* If this is a shift by a constant amount, simplify it. */
4440 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4441 {
4442 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4443 INTVAL (XEXP (x, 1)));
4444 if (GET_CODE (x) != code)
4445 goto restart;
4446 }
4447
4448#ifdef SHIFT_COUNT_TRUNCATED
4449 else if (GET_CODE (XEXP (x, 1)) != REG)
4450 SUBST (XEXP (x, 1),
4451 force_to_mode (XEXP (x, 1), GET_MODE (x),
4452 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
4453 NULL_RTX));
4454#endif
4455
4456 break;
4457 }
4458
4459 return x;
4460}
4461\f
4462/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4463 operations" because they can be replaced with two more basic operations.
4464 ZERO_EXTEND is also considered "compound" because it can be replaced with
4465 an AND operation, which is simpler, though only one operation.
4466
4467 The function expand_compound_operation is called with an rtx expression
4468 and will convert it to the appropriate shifts and AND operations,
4469 simplifying at each stage.
4470
4471 The function make_compound_operation is called to convert an expression
4472 consisting of shifts and ANDs into the equivalent compound expression.
4473 It is the inverse of this function, loosely speaking. */
4474
4475static rtx
4476expand_compound_operation (x)
4477 rtx x;
4478{
4479 int pos = 0, len;
4480 int unsignedp = 0;
4481 int modewidth;
4482 rtx tem;
4483
4484 switch (GET_CODE (x))
4485 {
4486 case ZERO_EXTEND:
4487 unsignedp = 1;
4488 case SIGN_EXTEND:
4489 /* We can't necessarily use a const_int for a multiword mode;
4490 it depends on implicitly extending the value.
4491 Since we don't know the right way to extend it,
4492 we can't tell whether the implicit way is right.
4493
4494 Even for a mode that is no wider than a const_int,
4495 we can't win, because we need to sign extend one of its bits through
4496 the rest of it, and we don't know which bit. */
4497 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4498 return x;
4499
4500 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4501 return x;
4502
4503 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4504 /* If the inner object has VOIDmode (the only way this can happen
4505 is if it is an ASM_OPERANDS), we can't do anything since we don't
4506 know how much masking to do. */
4507 if (len == 0)
4508 return x;
4509
4510 break;
4511
4512 case ZERO_EXTRACT:
4513 unsignedp = 1;
4514 case SIGN_EXTRACT:
4515 /* If the operand is a CLOBBER, just return it. */
4516 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4517 return XEXP (x, 0);
4518
4519 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4520 || GET_CODE (XEXP (x, 2)) != CONST_INT
4521 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4522 return x;
4523
4524 len = INTVAL (XEXP (x, 1));
4525 pos = INTVAL (XEXP (x, 2));
4526
4527 /* If this goes outside the object being extracted, replace the object
4528 with a (use (mem ...)) construct that only combine understands
4529 and is used only for this purpose. */
4530 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4531 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4532
4533#if BITS_BIG_ENDIAN
4534 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4535#endif
4536 break;
4537
4538 default:
4539 return x;
4540 }
4541
4542 /* If we reach here, we want to return a pair of shifts. The inner
4543 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4544 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4545 logical depending on the value of UNSIGNEDP.
4546
4547 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4548 converted into an AND of a shift.
4549
4550 We must check for the case where the left shift would have a negative
4551 count. This can happen in a case like (x >> 31) & 255 on machines
4552 that can't shift by a constant. On those machines, we would first
4553 combine the shift with the AND to produce a variable-position
4554 extraction. Then the constant of 31 would be substituted in to produce
4555 such a position. */
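/* For example, (sign_extend:SI (reg:QI R)) expands to
 (ashiftrt:SI (ashift:SI R (const_int 24)) (const_int 24)) on a 32-bit
 target. */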
4556
4557 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4558 if (modewidth >= pos - len)
4559 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4560 GET_MODE (x),
4561 simplify_shift_const (NULL_RTX, ASHIFT,
4562 GET_MODE (x),
4563 XEXP (x, 0),
4564 modewidth - pos - len),
4565 modewidth - len);
4566
4567 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4568 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4569 simplify_shift_const (NULL_RTX, LSHIFTRT,
4570 GET_MODE (x),
4571 XEXP (x, 0), pos),
4572 ((HOST_WIDE_INT) 1 << len) - 1);
4573 else
4574 /* Any other cases we can't handle. */
4575 return x;
4576
4577
4578 /* If we couldn't do this for some reason, return the original
4579 expression. */
4580 if (GET_CODE (tem) == CLOBBER)
4581 return x;
4582
4583 return tem;
4584}
4585\f
4586/* X is a SET which contains an assignment of one object into
4587 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4588 or certain SUBREGS). If possible, convert it into a series of
4589 logical operations.
4590
4591 We half-heartedly support variable positions, but do not at all
4592 support variable lengths. */
4593
4594static rtx
4595expand_field_assignment (x)
4596 rtx x;
4597{
4598 rtx inner;
4599 rtx pos; /* Always counts from low bit. */
4600 int len;
4601 rtx mask;
4602 enum machine_mode compute_mode;
4603
4604 /* Loop until we find something we can't simplify. */
4605 while (1)
4606 {
4607 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4608 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4609 {
4610 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4611 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4612 pos = const0_rtx;
4613 }
4614 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4615 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4616 {
4617 inner = XEXP (SET_DEST (x), 0);
4618 len = INTVAL (XEXP (SET_DEST (x), 1));
4619 pos = XEXP (SET_DEST (x), 2);
4620
4621 /* If the position is constant and spans the width of INNER,
4622 surround INNER with a USE to indicate this. */
4623 if (GET_CODE (pos) == CONST_INT
4624 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4625 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4626
4627#if BITS_BIG_ENDIAN
4628 if (GET_CODE (pos) == CONST_INT)
4629 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4630 - INTVAL (pos));
4631 else if (GET_CODE (pos) == MINUS
4632 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4633 && (INTVAL (XEXP (pos, 1))
4634 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4635 /* If position is ADJUST - X, new position is X. */
4636 pos = XEXP (pos, 0);
4637 else
4638 pos = gen_binary (MINUS, GET_MODE (pos),
4639 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4640 - len),
4641 pos);
4642#endif
4643 }
4644
4645 /* A SUBREG between two modes that occupy the same numbers of words
4646 can be done by moving the SUBREG to the source. */
4647 else if (GET_CODE (SET_DEST (x)) == SUBREG
4648 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4649 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4650 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4651 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4652 {
4653 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4654 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4655 SET_SRC (x)));
4656 continue;
4657 }
4658 else
4659 break;
4660
4661 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4662 inner = SUBREG_REG (inner);
4663
4664 compute_mode = GET_MODE (inner);
4665
4666 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4667 if (len < HOST_BITS_PER_WIDE_INT)
4668 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4669 else
4670 break;
4671
4672 /* Now compute the equivalent expression. Make a copy of INNER
4673 for the SET_DEST in case it is a MEM into which we will substitute;
4674 we don't want shared RTL in that case. */
4675 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4676 gen_binary (IOR, compute_mode,
4677 gen_binary (AND, compute_mode,
4678 gen_unary (NOT, compute_mode,
4679 gen_binary (ASHIFT,
4680 compute_mode,
4681 mask, pos)),
4682 inner),
4683 gen_binary (ASHIFT, compute_mode,
4684 gen_binary (AND, compute_mode,
4685 gen_lowpart_for_combine
4686 (compute_mode,
4687 SET_SRC (x)),
4688 mask),
4689 pos)));
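/* In C terms: INNER = (INNER & ~(MASK << POS)) | ((SRC & MASK) << POS),
 the usual read-modify-write bit-field store. */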
4690 }
4691
4692 return x;
4693}
4694\f
4695/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4696 it is an RTX that represents a variable starting position; otherwise,
4697 POS is the (constant) starting bit position (counted from the LSB).
4698
4699 INNER may be a USE. This will occur when we started with a bitfield
4700 that went outside the boundary of the object in memory, which is
4701 allowed on most machines. To isolate this case, we produce a USE
4702 whose mode is wide enough and surround the MEM with it. The only
4703 code that understands the USE is this routine. If it is not removed,
4704 it will cause the resulting insn not to match.
4705
4706 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4707 signed reference.
4708
4709 IN_DEST is non-zero if this is a reference in the destination of a
4710 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4711 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4712 be used.
4713
4714 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4715 ZERO_EXTRACT should be built even for bits starting at bit 0.
4716
4717 MODE is the desired mode of the result (if IN_DEST == 0). */
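/* As a worked example (assuming a 32-bit target, little-endian bit
   numbering, and no extzv/extv/insv patterns that impose other modes):
   extracting LEN == 8 unsigned bits at POS == 8 of (reg:SI 65), not in a
   destination, produces

	(zero_extract:SI (reg:SI 65) (const_int 8) (const_int 8))

   while the same extraction at POS == 0 is done without an extraction
   below and becomes (zero_extend:SI (subreg:QI (reg:SI 65) 0)).  */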
4718
4719static rtx
4720make_extraction (mode, inner, pos, pos_rtx, len,
4721 unsignedp, in_dest, in_compare)
4722 enum machine_mode mode;
4723 rtx inner;
4724 int pos;
4725 rtx pos_rtx;
4726 int len;
4727 int unsignedp;
4728 int in_dest, in_compare;
4729{
4730 /* This mode describes the size of the storage area
4731 to fetch the overall value from. Within that, we
4732 ignore the POS lowest bits, etc. */
4733 enum machine_mode is_mode = GET_MODE (inner);
4734 enum machine_mode inner_mode;
4735 enum machine_mode wanted_mem_mode = byte_mode;
4736 enum machine_mode pos_mode = word_mode;
4737 enum machine_mode extraction_mode = word_mode;
4738 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4739 int spans_byte = 0;
4740 rtx new = 0;
4741 rtx orig_pos_rtx = pos_rtx;
4742
4743 /* Get some information about INNER and get the innermost object. */
4744 if (GET_CODE (inner) == USE)
4745 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
4746 /* We don't need to adjust the position because we set up the USE
4747 to pretend that it was a full-word object. */
4748 spans_byte = 1, inner = XEXP (inner, 0);
4749 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4750 {
4751 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4752 consider just the QI as the memory to extract from.
4753 The subreg adds or removes high bits; its mode is
4754 irrelevant to the meaning of this extraction,
4755 since POS and LEN count from the lsb. */
4756 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4757 is_mode = GET_MODE (SUBREG_REG (inner));
4758 inner = SUBREG_REG (inner);
4759 }
4760
4761 inner_mode = GET_MODE (inner);
4762
4763 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4764 pos = INTVAL (pos_rtx), pos_rtx = 0;
4765
4766 /* See if this can be done without an extraction. We never can if the
4767 width of the field is not the same as that of some integer mode. For
4768 registers, we can only avoid the extraction if the position is at the
4769 low-order bit and this is either not in the destination or we have the
4770 appropriate STRICT_LOW_PART operation available.
4771
4772 For MEM, we can avoid an extract if the field starts on an appropriate
4773 boundary and we can change the mode of the memory reference. However,
4774 we cannot directly access the MEM if we have a USE and the underlying
4775 MEM is not TMODE. This combination means that MEM was being used in a
4776 context where bits outside its mode were being referenced; that is only
4777 valid in bit-field insns. */
4778
4779 if (tmode != BLKmode
4780 && ! (spans_byte && inner_mode != tmode)
4781 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
4782 && (! in_dest
4783 || (GET_CODE (inner) == REG
4784 && (movstrict_optab->handlers[(int) tmode].insn_code
4785 != CODE_FOR_nothing))))
4786 || (GET_CODE (inner) == MEM && pos_rtx == 0
4787 && (pos
4788 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4789 : BITS_PER_UNIT)) == 0
4790 /* We can't do this if we are widening INNER_MODE (it
4791 may not be aligned, for one thing). */
4792 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4793 && (inner_mode == tmode
4794 || (! mode_dependent_address_p (XEXP (inner, 0))
4795 && ! MEM_VOLATILE_P (inner))))))
4796 {
4797 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4798 field. If the original and current mode are the same, we need not
 4799 adjust the offset. Otherwise, we do if bytes are big-endian.
4800
 4801 If INNER is not a MEM, get a piece consisting of just the field
4802 of interest (in this case POS must be 0). */
4803
4804 if (GET_CODE (inner) == MEM)
4805 {
4806 int offset;
4807 /* POS counts from lsb, but make OFFSET count in memory order. */
4808 if (BYTES_BIG_ENDIAN)
4809 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
4810 else
4811 offset = pos / BITS_PER_UNIT;
4812
4813 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4814 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4815 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4816 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4817 }
4818 else if (GET_CODE (inner) == REG)
4819 /* We can't call gen_lowpart_for_combine here since we always want
4820 a SUBREG and it would sometimes return a new hard register. */
4821 new = gen_rtx (SUBREG, tmode, inner,
4822 (WORDS_BIG_ENDIAN
4823 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
4824 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
4825 / UNITS_PER_WORD)
4826 : 0));
4827 else
4828 new = force_to_mode (inner, tmode, len, NULL_RTX);
4829
4830 /* If this extraction is going into the destination of a SET,
4831 make a STRICT_LOW_PART unless we made a MEM. */
4832
4833 if (in_dest)
4834 return (GET_CODE (new) == MEM ? new
4835 : (GET_CODE (new) != SUBREG
4836 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4837 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4838
4839 /* Otherwise, sign- or zero-extend unless we already are in the
4840 proper mode. */
4841
4842 return (mode == tmode ? new
4843 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4844 mode, new));
4845 }
4846
4847 /* Unless this is a COMPARE or we have a funny memory reference,
4848 don't do anything with zero-extending field extracts starting at
4849 the low-order bit since they are simple AND operations. */
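  /* For instance, the low byte of X extracted unsigned is just
     (and:SI X (const_int 255)); returning zero here lets the caller
     keep that cheaper form.  */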
4850 if (pos_rtx == 0 && pos == 0 && ! in_dest
4851 && ! in_compare && ! spans_byte && unsignedp)
4852 return 0;
4853
4854 /* Get the mode to use should INNER be a MEM, the mode for the position,
4855 and the mode for the result. */
4856#ifdef HAVE_insv
4857 if (in_dest)
4858 {
4859 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4860 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4861 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4862 }
4863#endif
4864
4865#ifdef HAVE_extzv
4866 if (! in_dest && unsignedp)
4867 {
4868 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4869 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4870 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4871 }
4872#endif
4873
4874#ifdef HAVE_extv
4875 if (! in_dest && ! unsignedp)
4876 {
4877 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4878 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4879 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4880 }
4881#endif
4882
4883 /* Never narrow an object, since that might not be safe. */
4884
4885 if (mode != VOIDmode
4886 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4887 extraction_mode = mode;
4888
4889 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4890 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4891 pos_mode = GET_MODE (pos_rtx);
4892
4893 /* If this is not from memory or we have to change the mode of memory and
4894 cannot, the desired mode is EXTRACTION_MODE. */
4895 if (GET_CODE (inner) != MEM
4896 || (inner_mode != wanted_mem_mode
4897 && (mode_dependent_address_p (XEXP (inner, 0))
4898 || MEM_VOLATILE_P (inner))))
4899 wanted_mem_mode = extraction_mode;
4900
4901#if BITS_BIG_ENDIAN
4902 /* If position is constant, compute new position. Otherwise, build
4903 subtraction. */
4904 if (pos_rtx == 0)
4905 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4906 - len - pos);
4907 else
4908 pos_rtx
4909 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4910 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4911 GET_MODE_BITSIZE (wanted_mem_mode))
4912 - len),
4913 pos_rtx);
4914#endif
4915
4916 /* If INNER has a wider mode, make it smaller. If this is a constant
 4917 extract, try to adjust the offset to point to the byte containing
 4918 the value. */
4919 if (wanted_mem_mode != VOIDmode
4920 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4921 && ((GET_CODE (inner) == MEM
4922 && (inner_mode == wanted_mem_mode
4923 || (! mode_dependent_address_p (XEXP (inner, 0))
4924 && ! MEM_VOLATILE_P (inner))))))
4925 {
4926 int offset = 0;
4927
4928 /* The computations below will be correct if the machine is big
4929 endian in both bits and bytes or little endian in bits and bytes.
4930 If it is mixed, we must adjust. */
4931
4932 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4933 adjust OFFSET to compensate. */
4934#if BYTES_BIG_ENDIAN
4935 if (! spans_byte
4936 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4937 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4938#endif
4939
4940 /* If this is a constant position, we can move to the desired byte. */
4941 if (pos_rtx == 0)
4942 {
4943 offset += pos / BITS_PER_UNIT;
4944 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4945 }
4946
4947#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4948 if (! spans_byte && is_mode != wanted_mem_mode)
4949 offset = (GET_MODE_SIZE (is_mode)
4950 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4951#endif
4952
4953 if (offset != 0 || inner_mode != wanted_mem_mode)
4954 {
4955 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4956 plus_constant (XEXP (inner, 0), offset));
4957 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4958 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4959 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4960 inner = newmem;
4961 }
4962 }
4963
4964 /* If INNER is not memory, we can always get it into the proper mode. */
4965 else if (GET_CODE (inner) != MEM)
4966 inner = force_to_mode (inner, extraction_mode,
4967 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4968 : len + pos),
4969 NULL_RTX);
4970
4971 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4972 have to zero extend. Otherwise, we can just use a SUBREG. */
4973 if (pos_rtx != 0
4974 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4975 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4976 else if (pos_rtx != 0
4977 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4978 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4979
4980 /* Make POS_RTX unless we already have it and it is correct. If we don't
4981 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
4982 be a CONST_INT. */
4983 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
4984 pos_rtx = orig_pos_rtx;
4985
4986 else if (pos_rtx == 0)
4987 pos_rtx = GEN_INT (pos);
4988
 4989 /* Make the required operation. See if we can reuse an existing rtx. */
4990 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4991 extraction_mode, inner, GEN_INT (len), pos_rtx);
4992 if (! in_dest)
4993 new = gen_lowpart_for_combine (mode, new);
4994
4995 return new;
4996}
4997\f
4998/* Look at the expression rooted at X. Look for expressions
4999 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5000 Form these expressions.
5001
5002 Return the new rtx, usually just X.
5003
5004 Also, for machines like the Vax that don't have logical shift insns,
5005 try to convert logical to arithmetic shift operations in cases where
5006 they are equivalent. This undoes the canonicalizations to logical
5007 shifts done elsewhere.
5008
5009 We try, as much as possible, to re-use rtl expressions to save memory.
5010
5011 IN_CODE says what kind of expression we are processing. Normally, it is
 5012 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
5013 being kludges), it is MEM. When processing the arguments of a comparison
5014 or a COMPARE against zero, it is COMPARE. */
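/* As an example of what this routine does (assuming 32-bit SImode and
   little-endian bit numbering), the AND case rewrites

	(and:SI (lshiftrt:SI (reg:SI 60) (const_int 24)) (const_int 255))

   into the equivalent (zero_extract:SI (reg:SI 60) (const_int 8)
   (const_int 24)), a form that bit-field insns can match directly.  */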
5015
5016static rtx
5017make_compound_operation (x, in_code)
5018 rtx x;
5019 enum rtx_code in_code;
5020{
5021 enum rtx_code code = GET_CODE (x);
5022 enum machine_mode mode = GET_MODE (x);
5023 int mode_width = GET_MODE_BITSIZE (mode);
5024 enum rtx_code next_code;
5025 int i, count;
5026 rtx new = 0;
5027 rtx tem;
5028 char *fmt;
5029
5030 /* Select the code to be used in recursive calls. Once we are inside an
5031 address, we stay there. If we have a comparison, set to COMPARE,
5032 but once inside, go back to our default of SET. */
5033
5034 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5035 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5036 && XEXP (x, 1) == const0_rtx) ? COMPARE
5037 : in_code == COMPARE ? SET : in_code);
5038
5039 /* Process depending on the code of this operation. If NEW is set
5040 non-zero, it will be returned. */
5041
5042 switch (code)
5043 {
5044 case ASHIFT:
5045 case LSHIFT:
5046 /* Convert shifts by constants into multiplications if inside
5047 an address. */
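      /* For example, the shift in
	 (mem:SI (plus:SI (reg:SI 61) (ashift:SI (reg:SI 62) (const_int 2))))
	 becomes (mult:SI (reg:SI 62) (const_int 4)), the canonical form
	 for a scaled index inside an address.  */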
5048 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5049 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
5050 && INTVAL (XEXP (x, 1)) >= 0)
5051 {
5052 new = make_compound_operation (XEXP (x, 0), next_code);
5053 new = gen_rtx_combine (MULT, mode, new,
5054 GEN_INT ((HOST_WIDE_INT) 1
5055 << INTVAL (XEXP (x, 1))));
5056 }
5057 break;
5058
5059 case AND:
5060 /* If the second operand is not a constant, we can't do anything
5061 with it. */
5062 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5063 break;
5064
5065 /* If the constant is a power of two minus one and the first operand
5066 is a logical right shift, make an extraction. */
5067 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5068 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5069 {
5070 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5071 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5072 0, in_code == COMPARE);
5073 }
5074
5075 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5076 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5077 && subreg_lowpart_p (XEXP (x, 0))
5078 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5079 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5080 {
5081 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5082 next_code);
5083 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5084 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5085 0, in_code == COMPARE);
5086 }
5087
 5088 /* If we have (and (rotate X C) M) and C is larger than the number
5089 of bits in M, this is an extraction. */
5090
5091 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5092 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5093 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5094 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5095 {
5096 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5097 new = make_extraction (mode, new,
5098 (GET_MODE_BITSIZE (mode)
5099 - INTVAL (XEXP (XEXP (x, 0), 1))),
5100 NULL_RTX, i, 1, 0, in_code == COMPARE);
5101 }
5102
5103 /* On machines without logical shifts, if the operand of the AND is
5104 a logical shift and our mask turns off all the propagated sign
5105 bits, we can replace the logical shift with an arithmetic shift. */
5106 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5107 && (lshr_optab->handlers[(int) mode].insn_code
5108 == CODE_FOR_nothing)
5109 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5110 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5111 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5112 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5113 && mode_width <= HOST_BITS_PER_WIDE_INT)
5114 {
5115 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5116
5117 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5118 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5119 SUBST (XEXP (x, 0),
5120 gen_rtx_combine (ASHIFTRT, mode,
5121 make_compound_operation (XEXP (XEXP (x, 0), 0),
5122 next_code),
5123 XEXP (XEXP (x, 0), 1)));
5124 }
5125
5126 /* If the constant is one less than a power of two, this might be
5127 representable by an extraction even if no shift is present.
5128 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5129 we are in a COMPARE. */
5130 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5131 new = make_extraction (mode,
5132 make_compound_operation (XEXP (x, 0),
5133 next_code),
5134 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
5135
5136 /* If we are in a comparison and this is an AND with a power of two,
5137 convert this into the appropriate bit extract. */
5138 else if (in_code == COMPARE
5139 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5140 new = make_extraction (mode,
5141 make_compound_operation (XEXP (x, 0),
5142 next_code),
5143 i, NULL_RTX, 1, 1, 0, 1);
5144
5145 break;
5146
5147 case LSHIFTRT:
5148 /* If the sign bit is known to be zero, replace this with an
5149 arithmetic shift. */
5150 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5151 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5152 && mode_width <= HOST_BITS_PER_WIDE_INT
5153 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
5154 {
5155 new = gen_rtx_combine (ASHIFTRT, mode,
5156 make_compound_operation (XEXP (x, 0),
5157 next_code),
5158 XEXP (x, 1));
5159 break;
5160 }
5161
5162 /* ... fall through ... */
5163
5164 case ASHIFTRT:
5165 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5166 this is a SIGN_EXTRACT. */
5167 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5168 && GET_CODE (XEXP (x, 0)) == ASHIFT
5169 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5170 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
5171 {
5172 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5173 new = make_extraction (mode, new,
5174 (INTVAL (XEXP (x, 1))
5175 - INTVAL (XEXP (XEXP (x, 0), 1))),
5176 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5177 code == LSHIFTRT, 0, in_code == COMPARE);
5178 }
5179
 5180 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
5181 cases, we are better off returning a SIGN_EXTEND of the operation. */
5182
5183 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5184 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
5185 || GET_CODE (XEXP (x, 0)) == XOR
5186 || GET_CODE (XEXP (x, 0)) == PLUS)
5187 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5188 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5189 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
5190 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5191 && 0 == (INTVAL (XEXP (XEXP (x, 0), 1))
5192 & (((HOST_WIDE_INT) 1
5193 << (MIN (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)),
5194 INTVAL (XEXP (x, 1)))
5195 - 1)))))
5196 {
5197 rtx c1 = XEXP (XEXP (XEXP (x, 0), 0), 1);
5198 rtx c2 = XEXP (x, 1);
5199 rtx c3 = XEXP (XEXP (x, 0), 1);
5200 HOST_WIDE_INT newop1;
5201 rtx inner = XEXP (XEXP (XEXP (x, 0), 0), 0);
5202
5203 /* If C1 > C2, INNER needs to have the shift performed on it
5204 for C1-C2 bits. */
5205 if (INTVAL (c1) > INTVAL (c2))
5206 {
5207 inner = gen_binary (ASHIFT, mode, inner,
5208 GEN_INT (INTVAL (c1) - INTVAL (c2)));
5209 c1 = c2;
5210 }
5211
5212 newop1 = INTVAL (c3) >> INTVAL (c1);
5213 new = make_compound_operation (inner,
5214 GET_CODE (XEXP (x, 0)) == PLUS
5215 ? MEM : GET_CODE (XEXP (x, 0)));
5216 new = make_extraction (mode,
5217 gen_binary (GET_CODE (XEXP (x, 0)), mode, new,
5218 GEN_INT (newop1)),
5219 INTVAL (c2) - INTVAL (c1),
5220 NULL_RTX, mode_width - INTVAL (c2),
5221 code == LSHIFTRT, 0, in_code == COMPARE);
5222 }
5223
5224 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
5225 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5226 && GET_CODE (XEXP (x, 0)) == NEG
5227 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5228 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5229 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
5230 {
5231 new = make_compound_operation (XEXP (XEXP (XEXP (x, 0), 0), 0),
5232 next_code);
5233 new = make_extraction (mode,
5234 gen_unary (GET_CODE (XEXP (x, 0)), mode,
5235 new, 0),
5236 (INTVAL (XEXP (x, 1))
5237 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5238 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5239 code == LSHIFTRT, 0, in_code == COMPARE);
5240 }
5241 break;
5242
5243 case SUBREG:
5244 /* Call ourselves recursively on the inner expression. If we are
5245 narrowing the object and it has a different RTL code from
5246 what it originally did, do this SUBREG as a force_to_mode. */
5247
5248 tem = make_compound_operation (SUBREG_REG (x), in_code);
5249 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5250 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5251 && subreg_lowpart_p (x))
5252 {
5253 rtx newer = force_to_mode (tem, mode,
5254 GET_MODE_BITSIZE (mode), NULL_RTX);
5255
5256 /* If we have something other than a SUBREG, we might have
 5257 done an expansion, so rerun ourselves. */
5258 if (GET_CODE (newer) != SUBREG)
5259 newer = make_compound_operation (newer, in_code);
5260
5261 return newer;
5262 }
5263 }
5264
5265 if (new)
5266 {
5267 x = gen_lowpart_for_combine (mode, new);
5268 code = GET_CODE (x);
5269 }
5270
5271 /* Now recursively process each operand of this operation. */
5272 fmt = GET_RTX_FORMAT (code);
5273 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5274 if (fmt[i] == 'e')
5275 {
5276 new = make_compound_operation (XEXP (x, i), next_code);
5277 SUBST (XEXP (x, i), new);
5278 }
5279
5280 return x;
5281}
5282\f
 5283 /* Given M, see if it is a value that would select a field of bits
5284 within an item, but not the entire word. Return -1 if not.
5285 Otherwise, return the starting position of the field, where 0 is the
5286 low-order bit.
5287
5288 *PLEN is set to the length of the field. */
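/* For example, M == 0x0ff0 selects an 8-bit field starting at bit 4:
   M & -M == 0x10 gives position 4, and (M >> 4) + 1 == 0x100 gives
   length 8.  M == 0x0a is rejected: shifting off its low zero bit
   leaves 5, and 5 + 1 == 6 is not a power of 2.  */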
5289
5290static int
5291get_pos_from_mask (m, plen)
5292 unsigned HOST_WIDE_INT m;
5293 int *plen;
5294{
5295 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5296 int pos = exact_log2 (m & - m);
5297
5298 if (pos < 0)
5299 return -1;
5300
5301 /* Now shift off the low-order zero bits and see if we have a power of
5302 two minus 1. */
5303 *plen = exact_log2 ((m >> pos) + 1);
5304
5305 if (*plen <= 0)
5306 return -1;
5307
5308 return pos;
5309}
5310\f
5311/* Rewrite X so that it is an expression in MODE. We only care about the
5312 low-order BITS bits so we can ignore AND operations that just clear
5313 higher-order bits.
5314
5315 Also, if REG is non-zero and X is a register equal in value to REG,
5316 replace X with REG. */
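/* For instance, with MODE == QImode and BITS == 8, an X of
   (and:SI Y (const_int 255)) reduces to just the low part of Y: the AND
   only clears bits above the eight we care about, so the test at the end
   of the AND case below deletes it.  */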
5317
5318static rtx
5319force_to_mode (x, mode, bits, reg)
5320 rtx x;
5321 enum machine_mode mode;
5322 int bits;
5323 rtx reg;
5324{
5325 enum rtx_code code = GET_CODE (x);
5326 enum machine_mode op_mode = mode;
5327
5328 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
5329 just get X in the proper mode. */
5330
5331 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5332 || bits > GET_MODE_BITSIZE (mode))
5333 return gen_lowpart_for_combine (mode, x);
5334
5335 switch (code)
5336 {
5337 case SIGN_EXTEND:
5338 case ZERO_EXTEND:
5339 case ZERO_EXTRACT:
5340 case SIGN_EXTRACT:
5341 x = expand_compound_operation (x);
5342 if (GET_CODE (x) != code)
5343 return force_to_mode (x, mode, bits, reg);
5344 break;
5345
5346 case REG:
5347 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5348 || rtx_equal_p (reg, get_last_value (x))))
5349 x = reg;
5350 break;
5351
5352 case CONST_INT:
5353 if (bits < HOST_BITS_PER_WIDE_INT)
5354 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
5355 return x;
5356
5357 case SUBREG:
5358 /* Ignore low-order SUBREGs. */
5359 if (subreg_lowpart_p (x))
5360 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
5361 break;
5362
5363 case AND:
 5364 /* If this is an AND with a constant, handle it here. Otherwise, we
 5365 fall through to do the general binary case. */
5366
5367 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5368 {
5369 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
5370 int len = exact_log2 (mask + 1);
5371 rtx op = XEXP (x, 0);
5372
5373 /* If this is masking some low-order bits, we may be able to
5374 impose a stricter constraint on what bits of the operand are
5375 required. */
5376
5377 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
5378 reg);
5379
5380 if (bits < HOST_BITS_PER_WIDE_INT)
5381 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
5382
5383 /* If we have no AND in MODE, use the original mode for the
5384 operation. */
5385
5386 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5387 op_mode = GET_MODE (x);
5388
5389 x = simplify_and_const_int (x, op_mode, op, mask);
5390
5391 /* If X is still an AND, see if it is an AND with a mask that
5392 is just some low-order bits. If so, and it is BITS wide (it
5393 can't be wider), we don't need it. */
5394
5395 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5396 && bits < HOST_BITS_PER_WIDE_INT
5397 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
5398 x = XEXP (x, 0);
5399
5400 break;
5401 }
5402
5403 /* ... fall through ... */
5404
5405 case PLUS:
5406 case MINUS:
5407 case MULT:
5408 case IOR:
5409 case XOR:
5410 /* For most binary operations, just propagate into the operation and
5411 change the mode if we have an operation of that mode. */
5412
5413 if ((code == PLUS
5414 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5415 || (code == MINUS
5416 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5417 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
5418 == CODE_FOR_nothing))
5419 || (code == AND
5420 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5421 || (code == IOR
5422 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5423 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
5424 == CODE_FOR_nothing)))
5425 op_mode = GET_MODE (x);
5426
5427 x = gen_binary (code, op_mode,
5428 gen_lowpart_for_combine (op_mode,
5429 force_to_mode (XEXP (x, 0),
5430 mode, bits,
5431 reg)),
5432 gen_lowpart_for_combine (op_mode,
5433 force_to_mode (XEXP (x, 1),
5434 mode, bits,
5435 reg)));
5436 break;
5437
5438 case ASHIFT:
5439 case LSHIFT:
5440 /* For left shifts, do the same, but just for the first operand.
5441 However, we cannot do anything with shifts where we cannot
5442 guarantee that the counts are smaller than the size of the mode
5443 because such a count will have a different meaning in a
5444 wider mode.
5445
5446 If we can narrow the shift and know the count, we need even fewer
5447 bits of the first operand. */
5448
5449 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
5450 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5451 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5452 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
5453 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
5454 break;
5455
5456 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
5457 bits -= INTVAL (XEXP (x, 1));
5458
5459 if ((code == ASHIFT
5460 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5461 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
5462 == CODE_FOR_nothing)))
5463 op_mode = GET_MODE (x);
5464
5465 x = gen_binary (code, op_mode,
5466 gen_lowpart_for_combine (op_mode,
5467 force_to_mode (XEXP (x, 0),
5468 mode, bits,
5469 reg)),
5470 XEXP (x, 1));
5471 break;
5472
5473 case LSHIFTRT:
5474 /* Here we can only do something if the shift count is a constant and
5475 the count plus BITS is no larger than the width of MODE. In that
5476 case, we can do the shift in MODE. */
5477
5478 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5479 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
5480 {
5481 rtx inner = force_to_mode (XEXP (x, 0), mode,
5482 bits + INTVAL (XEXP (x, 1)), reg);
5483
5484 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5485 op_mode = GET_MODE (x);
5486
5487 x = gen_binary (LSHIFTRT, op_mode,
5488 gen_lowpart_for_combine (op_mode, inner),
5489 XEXP (x, 1));
5490 }
5491 break;
5492
5493 case ASHIFTRT:
5494 /* If this is a sign-extension operation that just affects bits
5495 we don't care about, remove it. */
5496
5497 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5498 && INTVAL (XEXP (x, 1)) >= 0
5499 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
5500 && GET_CODE (XEXP (x, 0)) == ASHIFT
5501 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5502 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5503 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
5504 break;
5505
5506 case NEG:
5507 case NOT:
5508 if ((code == NEG
5509 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5510 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
5511 == CODE_FOR_nothing)))
5512 op_mode = GET_MODE (x);
5513
5514 /* Handle these similarly to the way we handle most binary operations. */
5515 x = gen_unary (code, op_mode,
5516 gen_lowpart_for_combine (op_mode,
5517 force_to_mode (XEXP (x, 0), mode,
5518 bits, reg)));
5519 break;
5520
5521 case IF_THEN_ELSE:
5522 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5523 written in a narrower mode. We play it safe and do not do so. */
5524
5525 SUBST (XEXP (x, 1),
5526 gen_lowpart_for_combine (GET_MODE (x),
5527 force_to_mode (XEXP (x, 1), mode,
5528 bits, reg)));
5529 SUBST (XEXP (x, 2),
5530 gen_lowpart_for_combine (GET_MODE (x),
5531 force_to_mode (XEXP (x, 2), mode,
5532 bits, reg)));
5533 break;
5534 }
5535
5536 /* Ensure we return a value of the proper mode. */
5537 return gen_lowpart_for_combine (mode, x);
5538}
5539\f
5540/* Return the value of expression X given the fact that condition COND
5541 is known to be true when applied to REG as its first operand and VAL
5542 as its second. X is known to not be shared and so can be modified in
5543 place.
5544
5545 We only handle the simplest cases, and specifically those cases that
5546 arise with IF_THEN_ELSE expressions. */
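/* For example, given that R >= 0 (COND == GE, REG == R and
   VAL == (const_int 0)), (abs:SI R) simplifies to R; given instead that
   R < 0, (smin:SI R (const_int 0)) simplifies to R.  */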
5547
5548static rtx
5549known_cond (x, cond, reg, val)
5550 rtx x;
5551 enum rtx_code cond;
5552 rtx reg, val;
5553{
5554 enum rtx_code code = GET_CODE (x);
5555 rtx new, temp;
5556 char *fmt;
5557 int i, j;
5558
5559 if (side_effects_p (x))
5560 return x;
5561
5562 if (cond == EQ && rtx_equal_p (x, reg))
5563 return val;
5564
5565 /* If X is (abs REG) and we know something about REG's relationship
5566 with zero, we may be able to simplify this. */
5567
5568 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5569 switch (cond)
5570 {
5571 case GE: case GT: case EQ:
5572 return XEXP (x, 0);
5573 case LT: case LE:
5574 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5575 }
5576
5577 /* The only other cases we handle are MIN, MAX, and comparisons if the
5578 operands are the same as REG and VAL. */
5579
5580 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5581 {
5582 if (rtx_equal_p (XEXP (x, 0), val))
5583 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
5584
5585 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
5586 {
5587 if (GET_RTX_CLASS (code) == '<')
5588 return (comparison_dominates_p (cond, code) ? const_true_rtx
5589 : (comparison_dominates_p (cond,
5590 reverse_condition (code))
5591 ? const0_rtx : x));
5592
5593 else if (code == SMAX || code == SMIN
5594 || code == UMIN || code == UMAX)
5595 {
5596 int unsignedp = (code == UMIN || code == UMAX);
5597
5598 if (code == SMAX || code == UMAX)
5599 cond = reverse_condition (cond);
5600
5601 switch (cond)
5602 {
5603 case GE: case GT:
5604 return unsignedp ? x : XEXP (x, 1);
5605 case LE: case LT:
5606 return unsignedp ? x : XEXP (x, 0);
5607 case GEU: case GTU:
5608 return unsignedp ? XEXP (x, 1) : x;
5609 case LEU: case LTU:
5610 return unsignedp ? XEXP (x, 0) : x;
5611 }
5612 }
5613 }
5614 }
5615
5616 fmt = GET_RTX_FORMAT (code);
5617 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5618 {
5619 if (fmt[i] == 'e')
5620 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
5621 else if (fmt[i] == 'E')
5622 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5623 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
5624 cond, reg, val));
5625 }
5626
5627 return x;
5628}
5629\f
5630/* See if X, a SET operation, can be rewritten as a bit-field assignment.
5631 Return that assignment if so.
5632
5633 We only handle the most common cases. */
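/* A typical case handled below (on a machine with the strict-low-part
   or bit-field insns this produces): with D and B in SImode and the
   nonzero bits of B confined to the low byte,

	(set D (ior (and D (const_int -256)) B))

   becomes an assignment of the low byte of B into the 8-bit field of D
   at position 0.  */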
5634
5635static rtx
5636make_field_assignment (x)
5637 rtx x;
5638{
5639 rtx dest = SET_DEST (x);
5640 rtx src = SET_SRC (x);
5641 rtx ourdest;
5642 rtx assign;
5643 HOST_WIDE_INT c1;
5644 int pos, len;
5645 rtx other;
5646 enum machine_mode mode;
5647
5648 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
5649 a clear of a one-bit field. We will have changed it to
5650 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
5651 for a SUBREG. */
5652
5653 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
5654 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
5655 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
5656 && (rtx_equal_p (dest, XEXP (src, 1))
5657 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5658 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5659 {
5660 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
5661 1, 1, 1, 0);
5662 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5663 }
5664
5665 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
5666 && subreg_lowpart_p (XEXP (src, 0))
5667 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
5668 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
5669 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
5670 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
5671 && (rtx_equal_p (dest, XEXP (src, 1))
5672 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5673 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5674 {
5675 assign = make_extraction (VOIDmode, dest, 0,
5676 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
5677 1, 1, 1, 0);
5678 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5679 }
5680
 5681 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
5682 one-bit field. */
5683 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
5684 && XEXP (XEXP (src, 0), 0) == const1_rtx
5685 && (rtx_equal_p (dest, XEXP (src, 1))
5686 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5687 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5688 {
5689 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
5690 1, 1, 1, 0);
5691 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
5692 }
5693
5694 /* The other case we handle is assignments into a constant-position
5695 field. They look like (ior (and DEST C1) OTHER). If C1 represents
5696 a mask that has all one bits except for a group of zero bits and
5697 OTHER is known to have zeros where C1 has ones, this is such an
5698 assignment. Compute the position and length from C1. Shift OTHER
5699 to the appropriate position, force it to the required mode, and
5700 make the extraction. Check for the AND in both operands. */
5701
5702 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
5703 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
5704 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
5705 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
5706 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
5707 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
5708 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
5709 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
5710 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
5711 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
5712 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
5713 dest)))
5714 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
5715 else
5716 return x;
5717
5718 pos = get_pos_from_mask (~c1, &len);
5719 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5720 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
5721 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
5722 return x;
5723
5724 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5725
5726 /* The mode to use for the source is the mode of the assignment, or of
5727 what is inside a possible STRICT_LOW_PART. */
5728 mode = (GET_CODE (assign) == STRICT_LOW_PART
5729 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5730
5731 /* Shift OTHER right POS places and make it the source, restricting it
5732 to the proper length and mode. */
5733
5734 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5735 GET_MODE (src), other, pos),
5736 mode, len, dest);
5737
5738 return gen_rtx_combine (SET, VOIDmode, assign, src);
5739}
5740\f
5741/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5742 if so. */
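/* The classic instance, with a commutative inner operation:
   (ior (and A C) (and B C)) becomes (and (ior A B) C), a win whenever
   (ior A B) simplifies further.  */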
5743
5744static rtx
5745apply_distributive_law (x)
5746 rtx x;
5747{
5748 enum rtx_code code = GET_CODE (x);
5749 rtx lhs, rhs, other;
5750 rtx tem;
5751 enum rtx_code inner_code;
5752
5753 /* Distributivity is not true for floating point.
5754 It can change the value. So don't do it.
5755 -- rms and moshier@world.std.com. */
5756 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
5757 return x;
5758
5759 /* The outer operation can only be one of the following: */
5760 if (code != IOR && code != AND && code != XOR
5761 && code != PLUS && code != MINUS)
5762 return x;
5763
5764 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5765
5766 /* If either operand is a primitive we can't do anything, so get out fast. */
5767 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5768 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5769 return x;
5770
5771 lhs = expand_compound_operation (lhs);
5772 rhs = expand_compound_operation (rhs);
5773 inner_code = GET_CODE (lhs);
5774 if (inner_code != GET_CODE (rhs))
5775 return x;
5776
5777 /* See if the inner and outer operations distribute. */
5778 switch (inner_code)
5779 {
5780 case LSHIFTRT:
5781 case ASHIFTRT:
5782 case AND:
5783 case IOR:
5784 /* These all distribute except over PLUS. */
5785 if (code == PLUS || code == MINUS)
5786 return x;
5787 break;
5788
5789 case MULT:
5790 if (code != PLUS && code != MINUS)
5791 return x;
5792 break;
5793
5794 case ASHIFT:
5795 case LSHIFT:
5796 /* These are also multiplies, so they distribute over everything. */
5797 break;
5798
5799 case SUBREG:
 5800 /* Non-paradoxical SUBREGs distribute over all operations, provided
5801 the inner modes and word numbers are the same, this is an extraction
5802 of a low-order part, we don't convert an fp operation to int or
5803 vice versa, and we would not be converting a single-word
5804 operation into a multi-word operation. The latter test is not
5805 required, but it prevents generating unneeded multi-word operations.
5806 Some of the previous tests are redundant given the latter test, but
5807 are retained because they are required for correctness.
5808
5809 We produce the result slightly differently in this case. */
5810
5811 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5812 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5813 || ! subreg_lowpart_p (lhs)
5814 || (GET_MODE_CLASS (GET_MODE (lhs))
5815 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5816 || (GET_MODE_SIZE (GET_MODE (lhs))
5817 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5818 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5819 return x;
5820
5821 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5822 SUBREG_REG (lhs), SUBREG_REG (rhs));
5823 return gen_lowpart_for_combine (GET_MODE (x), tem);
5824
5825 default:
5826 return x;
5827 }
5828
5829 /* Set LHS and RHS to the inner operands (A and B in the example
5830 above) and set OTHER to the common operand (C in the example).
 5831 There is only one way to do this unless the inner operation is
5832 commutative. */
5833 if (GET_RTX_CLASS (inner_code) == 'c'
5834 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5835 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5836 else if (GET_RTX_CLASS (inner_code) == 'c'
5837 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5838 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5839 else if (GET_RTX_CLASS (inner_code) == 'c'
5840 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5841 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5842 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5843 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5844 else
5845 return x;
5846
5847 /* Form the new inner operation, seeing if it simplifies first. */
5848 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5849
5850 /* There is one exception to the general way of distributing:
 5851 (a | b) ^ (a | c) -> (~a) & (b ^ c) */
5852 if (code == XOR && inner_code == IOR)
5853 {
5854 inner_code = AND;
5855 other = gen_unary (NOT, GET_MODE (x), other);
5856 }
5857
 5858 /* We may be able to continue distributing the result, so call
5859 ourselves recursively on the inner operation before forming the
5860 outer operation, which we return. */
5861 return gen_binary (inner_code, GET_MODE (x),
5862 apply_distributive_law (tem), other);
5863}
5864\f
5865/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5866 in MODE.
5867
5868 Return an equivalent form, if different from X. Otherwise, return X. If
5869 X is zero, we are to always construct the equivalent form. */
5870
5871static rtx
5872simplify_and_const_int (x, mode, varop, constop)
5873 rtx x;
5874 enum machine_mode mode;
5875 rtx varop;
5876 unsigned HOST_WIDE_INT constop;
5877{
5878 register enum machine_mode tmode;
5879 register rtx temp;
5880 unsigned HOST_WIDE_INT nonzero;
5881
5882 /* There is a large class of optimizations based on the principle that
5883 some operations produce results where certain bits are known to be zero,
5884 and hence are not significant to the AND. For example, if we have just
5885 done a left shift of one bit, the low-order bit is known to be zero and
5886 hence an AND with a mask of ~1 would not do anything.
5887
5888 At the end of the following loop, we set:
5889
5890 VAROP to be the item to be AND'ed with;
5891 CONSTOP to the constant value to AND it with. */
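  /* Following the example above: with VAROP == (ashift:SI X (const_int 1))
     and CONSTOP == ~1, every nonzero bit of VAROP already lies within the
     mask, so CONSTOP == NONZERO holds below and VAROP is returned with no
     AND at all.  */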
5892
5893 while (1)
5894 {
5895 /* If we ever encounter a mode wider than the host machine's widest
5896 integer size, we can't compute the masks accurately, so give up. */
5897 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5898 break;
5899
5900 /* Unless one of the cases below does a `continue',
5901 a `break' will be executed to exit the loop. */
5902
5903 switch (GET_CODE (varop))
5904 {
5905 case CLOBBER:
5906 /* If VAROP is a (clobber (const_int)), return it since we know
5907 we are generating something that won't match. */
5908 return varop;
5909
5910#if ! BITS_BIG_ENDIAN
5911 case USE:
5912 /* VAROP is a (use (mem ..)) that was made from a bit-field
5913 extraction that spanned the boundary of the MEM. If we are
5914 now masking so it is within that boundary, we don't need the
5915 USE any more. */
5916 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5917 {
5918 varop = XEXP (varop, 0);
5919 continue;
5920 }
5921 break;
5922#endif
5923
5924 case SUBREG:
5925 if (subreg_lowpart_p (varop)
 5926 /* We can ignore the effect of this SUBREG if it narrows the mode
5927 or, on machines where byte operations extend, if the
5928 constant masks to zero all the bits the mode doesn't have. */
5929 && ((GET_MODE_SIZE (GET_MODE (varop))
5930 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5931#ifdef BYTE_LOADS_EXTEND
5932 || (0 == (constop
5933 & GET_MODE_MASK (GET_MODE (varop))
5934 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5935#endif
5936 ))
5937 {
5938 varop = SUBREG_REG (varop);
5939 continue;
5940 }
5941 break;
5942
5943 case ZERO_EXTRACT:
5944 case SIGN_EXTRACT:
5945 case ZERO_EXTEND:
5946 case SIGN_EXTEND:
5947 /* Try to expand these into a series of shifts and then work
5948 with that result. If we can't, for example, if the extract
5949 isn't at a fixed position, give up. */
5950 temp = expand_compound_operation (varop);
5951 if (temp != varop)
5952 {
5953 varop = temp;
5954 continue;
5955 }
5956 break;
5957
5958 case AND:
5959 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5960 {
5961 constop &= INTVAL (XEXP (varop, 1));
5962 varop = XEXP (varop, 0);
5963 continue;
5964 }
5965 break;
5966
5967 case IOR:
5968 case XOR:
5969 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
 5970 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5971 operation which may be a bitfield extraction. Ensure
5972 that the constant we form is not wider than the mode of
5973 VAROP. */
5974
5975 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5976 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5977 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5978 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5979 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5980 && ((INTVAL (XEXP (XEXP (varop, 0), 1))
5981 + floor_log2 (INTVAL (XEXP (varop, 1))))
5982 < GET_MODE_BITSIZE (GET_MODE (varop)))
5983 && (INTVAL (XEXP (varop, 1))
 5984 & ~ nonzero_bits (XEXP (varop, 0), GET_MODE (varop))) == 0)
5985 {
5986 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5987 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5988 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5989 XEXP (XEXP (varop, 0), 0), temp);
5990 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5991 temp, XEXP (varop, 1));
5992 continue;
5993 }
5994
5995 /* Apply the AND to both branches of the IOR or XOR, then try to
5996 apply the distributive law. This may eliminate operations
5997 if either branch can be simplified because of the AND.
5998 It may also make some cases more complex, but those cases
5999 probably won't match a pattern either with or without this. */
6000 return
6001 gen_lowpart_for_combine
6002 (mode, apply_distributive_law
6003 (gen_rtx_combine
6004 (GET_CODE (varop), GET_MODE (varop),
6005 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6006 XEXP (varop, 0), constop),
6007 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6008 XEXP (varop, 1), constop))));
6009
6010 case NOT:
 6011 /* (and (not FOO) CONST) is (and (xor FOO CONST) CONST), so if FOO is an
6012 LSHIFTRT, we can do the same as above. Ensure that the constant
6013 we form is not wider than the mode of VAROP. */
6014
6015 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
6016 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
6017 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
6018 && (INTVAL (XEXP (XEXP (varop, 0), 1)) + floor_log2 (constop)
6019 < GET_MODE_BITSIZE (GET_MODE (varop)))
6020 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6021 {
6022 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
6023 temp = gen_binary (XOR, GET_MODE (varop),
6024 XEXP (XEXP (varop, 0), 0), temp);
6025 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
6026 temp, XEXP (XEXP (varop, 0), 1));
6027 continue;
6028 }
6029 break;
6030
6031 case ASHIFTRT:
6032 /* If we are just looking for the sign bit, we don't need this
6033 shift at all, even if it has a variable count. */
6034 if (constop == ((HOST_WIDE_INT) 1
6035 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
6036 {
6037 varop = XEXP (varop, 0);
6038 continue;
6039 }
6040
6041 /* If this is a shift by a constant, get a mask that contains
6042 those bits that are not copies of the sign bit. We then have
6043 two cases: If CONSTOP only includes those bits, this can be
6044 a logical shift, which may allow simplifications. If CONSTOP
6045 is a single-bit field not within those bits, we are requesting
6046 a copy of the sign bit and hence can shift the sign bit to
6047 the appropriate location. */
6048 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6049 && INTVAL (XEXP (varop, 1)) >= 0
6050 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
6051 {
6052 int i = -1;
6053
6054 nonzero = GET_MODE_MASK (GET_MODE (varop));
6055 nonzero >>= INTVAL (XEXP (varop, 1));
6056
6057 if ((constop & ~ nonzero) == 0
6058 || (i = exact_log2 (constop)) >= 0)
6059 {
6060 varop = simplify_shift_const
6061 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6062 i < 0 ? INTVAL (XEXP (varop, 1))
6063 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
6064 if (GET_CODE (varop) != ASHIFTRT)
6065 continue;
6066 }
6067 }
6068
6069 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
6070 even if the shift count isn't a constant. */
6071 if (constop == 1)
6072 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
6073 XEXP (varop, 0), XEXP (varop, 1));
6074 break;
6075
6076 case LSHIFTRT:
6077 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6078 shift and AND produces only copies of the sign bit (C2 is one less
6079 than a power of two), we can do this with just a shift. */
6080
6081 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6082 && ((INTVAL (XEXP (varop, 1))
6083 + num_sign_bit_copies (XEXP (varop, 0),
6084 GET_MODE (XEXP (varop, 0))))
6085 >= GET_MODE_BITSIZE (GET_MODE (varop)))
6086 && exact_log2 (constop + 1) >= 0)
6087 varop
6088 = gen_rtx_combine (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6089 GEN_INT (GET_MODE_BITSIZE (GET_MODE (varop))
6090 - exact_log2 (constop + 1)));
6091 break;
6092
6093 case NE:
6094 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
6095 included in STORE_FLAG_VALUE and FOO has no bits that might be
6096 nonzero not in CONST. */
6097 if ((constop & ~ STORE_FLAG_VALUE) == 0
 6098 && XEXP (varop, 1) == const0_rtx
6099 && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
6100 {
6101 varop = XEXP (varop, 0);
6102 continue;
6103 }
6104 break;
6105
6106 case PLUS:
6107 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6108 low-order bits (as in an alignment operation) and FOO is already
 6109 aligned to that boundary, we can remove this AND
6110 and possibly the PLUS if it is now adding zero. */
6111 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6112 && exact_log2 (-constop) >= 0
6113 && (nonzero_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
6114 {
6115 varop = plus_constant (XEXP (varop, 0),
6116 INTVAL (XEXP (varop, 1)) & constop);
6117 constop = ~0;
6118 break;
6119 }
6120
6121 /* ... fall through ... */
6122
6123 case MINUS:
6124 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
6125 less than powers of two and M2 is narrower than M1, we can
6126 eliminate the inner AND. This occurs when incrementing
6127 bit fields. */
6128
6129 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
6130 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
6131 SUBST (XEXP (varop, 0),
6132 expand_compound_operation (XEXP (varop, 0)));
6133
6134 if (GET_CODE (XEXP (varop, 0)) == AND
6135 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
6136 && exact_log2 (constop + 1) >= 0
6137 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
6138 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
6139 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
6140 break;
6141 }
6142
6143 break;
6144 }
6145
6146 /* If we have reached a constant, this whole thing is constant. */
6147 if (GET_CODE (varop) == CONST_INT)
6148 return GEN_INT (constop & INTVAL (varop));
6149
6150 /* See what bits may be nonzero in VAROP. Unlike the general case of
6151 a call to nonzero_bits, here we don't care about bits outside
6152 MODE. */
6153
6154 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6155
6156 /* Turn off all bits in the constant that are known to already be zero.
6157 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6158 which is tested below. */
6159
6160 constop &= nonzero;
6161
6162 /* If we don't have any bits left, return zero. */
6163 if (constop == 0)
6164 return const0_rtx;
6165
6166 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6167 if we already had one (just check for the simplest cases). */
6168 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6169 && GET_MODE (XEXP (x, 0)) == mode
6170 && SUBREG_REG (XEXP (x, 0)) == varop)
6171 varop = XEXP (x, 0);
6172 else
6173 varop = gen_lowpart_for_combine (mode, varop);
6174
6175 /* If we can't make the SUBREG, try to return what we were given. */
6176 if (GET_CODE (varop) == CLOBBER)
6177 return x ? x : varop;
6178
6179 /* If we are only masking insignificant bits, return VAROP. */
6180 if (constop == nonzero)
6181 x = varop;
6182
6183 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6184 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6185 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
6186
6187 else
6188 {
6189 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6190 || INTVAL (XEXP (x, 1)) != constop)
6191 SUBST (XEXP (x, 1), GEN_INT (constop));
6192
6193 SUBST (XEXP (x, 0), varop);
6194 }
6195
6196 return x;
6197}
6198\f
6199/* Given an expression, X, compute which bits in X can be non-zero.
6200 We don't care about bits outside of those defined in MODE.
6201
 6202 For most X this is simply GET_MODE_MASK (MODE), but if X is
6203 a shift, AND, or zero_extract, we can do better. */
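/* For example, for (and:SI Y (const_int 12)) the AND case intersects the
   operands' masks, so only bits 2 and 3 can be nonzero; for
   (zero_extend:SI (mem:QI A)) only the low eight bits can be.  */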
6204
6205static unsigned HOST_WIDE_INT
6206nonzero_bits (x, mode)
6207 rtx x;
6208 enum machine_mode mode;
6209{
6210 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6211 unsigned HOST_WIDE_INT inner_nz;
6212 enum rtx_code code;
6213 int mode_width = GET_MODE_BITSIZE (mode);
6214 rtx tem;
6215
6216 /* If X is wider than MODE, use its mode instead. */
6217 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6218 {
6219 mode = GET_MODE (x);
6220 nonzero = GET_MODE_MASK (mode);
6221 mode_width = GET_MODE_BITSIZE (mode);
6222 }
6223
6224 if (mode_width > HOST_BITS_PER_WIDE_INT)
6225 /* Our only callers in this case look for single bit values. So
6226 just return the mode mask. Those tests will then be false. */
6227 return nonzero;
6228
6229 code = GET_CODE (x);
6230 switch (code)
6231 {
6232 case REG:
6233#ifdef STACK_BOUNDARY
6234 /* If this is the stack pointer, we may know something about its
6235 alignment. If PUSH_ROUNDING is defined, it is possible for the
6236 stack to be momentarily aligned only to that amount, so we pick
6237 the least alignment. */
6238
6239 if (x == stack_pointer_rtx)
6240 {
6241 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6242
6243#ifdef PUSH_ROUNDING
6244 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6245#endif
6246
6247 return nonzero & ~ (sp_alignment - 1);
6248 }
6249#endif
6250
6251 /* If X is a register whose nonzero bits value is current, use it.
6252 Otherwise, if X is a register whose value we can find, use that
6253 value. Otherwise, use the previously-computed global nonzero bits
6254 for this register. */
6255
6256 if (reg_last_set_value[REGNO (x)] != 0
6257 && reg_last_set_mode[REGNO (x)] == mode
6258 && (reg_n_sets[REGNO (x)] == 1
6259 || reg_last_set_label[REGNO (x)] == label_tick)
6260 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6261 return reg_last_set_nonzero_bits[REGNO (x)];
6262
6263 tem = get_last_value (x);
6264
6265 if (tem)
6266 {
6267#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6268 /* If X is narrower than MODE and TEM is a non-negative
6269 constant that would appear negative in the mode of X,
6270 sign-extend it for use in reg_nonzero_bits because some
6271 machines (maybe most) will actually do the sign-extension
6272 and this is the conservative approach.
6273
6274 ??? For 2.5, try to tighten up the MD files in this regard
6275 instead of this kludge. */
6276
6277 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6278 && GET_CODE (tem) == CONST_INT
6279 && INTVAL (tem) > 0
6280 && 0 != (INTVAL (tem)
6281 & ((HOST_WIDE_INT) 1
6282 << GET_MODE_BITSIZE (GET_MODE (x)))))
6283 tem = GEN_INT (INTVAL (tem)
6284 | ((HOST_WIDE_INT) (-1)
6285 << GET_MODE_BITSIZE (GET_MODE (x))));
6286#endif
6287 return nonzero_bits (tem, mode);
6288 }
6289 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6290 return reg_nonzero_bits[REGNO (x)] & nonzero;
6291 else
6292 return nonzero;
6293
6294 case CONST_INT:
6295#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6296 /* If X is negative in MODE, sign-extend the value. */
6297 if (INTVAL (x) > 0
6298 && 0 != (INTVAL (x)
6299 & ((HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (GET_MODE (x)))))
6300 return (INTVAL (x)
6301 | ((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (GET_MODE (x))));
6302#endif
6303
6304 return INTVAL (x);
6305
6306#ifdef BYTE_LOADS_ZERO_EXTEND
6307 case MEM:
6308 /* In many, if not most, RISC machines, reading a byte from memory
6309 zeros the rest of the register. Noticing that fact saves a lot
6310 of extra zero-extends. */
6311 nonzero &= GET_MODE_MASK (GET_MODE (x));
6312 break;
6313#endif
6314
6315#if STORE_FLAG_VALUE == 1
6316 case EQ: case NE:
6317 case GT: case GTU:
6318 case LT: case LTU:
6319 case GE: case GEU:
6320 case LE: case LEU:
6321
6322 if (GET_MODE_CLASS (mode) == MODE_INT)
6323 nonzero = 1;
6324
6325 /* A comparison operation only sets the bits given by its mode. The
 6326 rest of the bits are undefined. */
6327 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6328 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6329 break;
6330#endif
6331
6332 case NEG:
6333 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6334 == GET_MODE_BITSIZE (GET_MODE (x)))
6335 nonzero = 1;
6336
6337 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6338 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6339 break;
6340
6341 case ABS:
6342 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6343 == GET_MODE_BITSIZE (GET_MODE (x)))
6344 nonzero = 1;
6345 break;
6346
6347 case TRUNCATE:
6348 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6349 break;
6350
6351 case ZERO_EXTEND:
6352 nonzero &= nonzero_bits (XEXP (x, 0), mode);
6353 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6354 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6355 break;
6356
6357 case SIGN_EXTEND:
6358 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6359 Otherwise, show all the bits in the outer mode but not the inner
6360 may be non-zero. */
6361 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6362 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6363 {
6364 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6365 if (inner_nz &
6366 (((HOST_WIDE_INT) 1
6367 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6368 inner_nz |= (GET_MODE_MASK (mode)
6369 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6370 }
6371
6372 nonzero &= inner_nz;
6373 break;
6374
6375 case AND:
6376 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6377 & nonzero_bits (XEXP (x, 1), mode));
6378 break;
6379
6380 case XOR: case IOR:
6381 case UMIN: case UMAX: case SMIN: case SMAX:
6382 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6383 | nonzero_bits (XEXP (x, 1), mode));
6384 break;
6385
6386 case PLUS: case MINUS:
6387 case MULT:
6388 case DIV: case UDIV:
6389 case MOD: case UMOD:
6390 /* We can apply the rules of arithmetic to compute the number of
6391 high- and low-order zero bits of these operations. We start by
6392 computing the width (position of the highest-order non-zero bit)
6393 and the number of low-order zero bits for each value. */
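 /* As an illustration of these rules (values chosen here, not in the
    original source): if NZ0 == 0x0c (width 4, low 2) and NZ1 == 0x30
    (width 6, low 4), then PLUS fits in MAX (4, 6) + 1 == 7 bits with
    MIN (2, 4) == 2 low-order zero bits, giving the mask 0x7c, while
    MULT fits in 4 + 6 == 10 bits with 2 + 4 == 6 low-order zero bits,
    giving the mask 0x3c0.  */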
6394 {
6395 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6396 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6397 int width0 = floor_log2 (nz0) + 1;
6398 int width1 = floor_log2 (nz1) + 1;
6399 int low0 = floor_log2 (nz0 & -nz0);
6400 int low1 = floor_log2 (nz1 & -nz1);
6401 int op0_maybe_minusp = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6402 int op1_maybe_minusp = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6403 int result_width = mode_width;
6404 int result_low = 0;
6405
6406 switch (code)
6407 {
6408 case PLUS:
6409 result_width = MAX (width0, width1) + 1;
6410 result_low = MIN (low0, low1);
6411 break;
6412 case MINUS:
6413 result_low = MIN (low0, low1);
6414 break;
6415 case MULT:
6416 result_width = width0 + width1;
6417 result_low = low0 + low1;
6418 break;
6419 case DIV:
6420 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6421 result_width = width0;
6422 break;
6423 case UDIV:
6424 result_width = width0;
6425 break;
6426 case MOD:
6427 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6428 result_width = MIN (width0, width1);
6429 result_low = MIN (low0, low1);
6430 break;
6431 case UMOD:
6432 result_width = MIN (width0, width1);
6433 result_low = MIN (low0, low1);
6434 break;
6435 }
6436
6437 if (result_width < mode_width)
6438 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6439
6440 if (result_low > 0)
6441 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6442 }
6443 break;
6444
6445 case ZERO_EXTRACT:
6446 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6447 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6448 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6449 break;
6450
6451 case SUBREG:
6452 /* If this is a SUBREG formed for a promoted variable that has
6453 been zero-extended, we know that at least the high-order bits
6454 are zero, though others might be too. */
6455
6456 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6457 nonzero = (GET_MODE_MASK (GET_MODE (x))
6458 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
6459
6460 /* If the inner mode is a single word for both the host and target
6461 machines, we can compute this from which bits of the inner
6462 object might be nonzero. */
6463 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6464 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6465 <= HOST_BITS_PER_WIDE_INT))
6466 {
6467 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
6468#ifndef BYTE_LOADS_EXTEND
6469 /* On many CISC machines, accessing an object in a wider mode
6470 causes the high-order bits to become undefined. So they are
6471 not known to be zero. */
6472 if (GET_MODE_SIZE (GET_MODE (x))
6473 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6474 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6475 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6476#endif
6477 }
6478 break;
6479
6480 case ASHIFTRT:
6481 case LSHIFTRT:
6482 case ASHIFT:
6483 case LSHIFT:
6484 case ROTATE:
6485 /* The nonzero bits are in two classes: any bits within MODE
6486 that aren't in GET_MODE (x) are always significant. The rest of the
6487 nonzero bits are those that are significant in the operand of
6488 the shift when shifted the appropriate number of bits. This
6489 shows that high-order bits are cleared by the right shift and
6490 low-order bits by left shifts. */
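 /* For instance (illustrative values), for (lshiftrt:SI X 4) where the
    nonzero bits of X are 0xff, INNER becomes 0xff >> 4 == 0x0f; for
    (ashift:SI X 4) it becomes 0xff << 4 == 0xff0.  */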
6491 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6492 && INTVAL (XEXP (x, 1)) >= 0
6493 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6494 {
6495 enum machine_mode inner_mode = GET_MODE (x);
6496 int width = GET_MODE_BITSIZE (inner_mode);
6497 int count = INTVAL (XEXP (x, 1));
6498 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6499 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6500 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
6501 unsigned HOST_WIDE_INT outer = 0;
6502
6503 if (mode_width > width)
6504 outer = (op_nonzero & nonzero & ~ mode_mask);
6505
6506 if (code == LSHIFTRT)
6507 inner >>= count;
6508 else if (code == ASHIFTRT)
6509 {
6510 inner >>= count;
6511
6512 /* If the sign bit may have been nonzero before the shift, we
6513 need to mark all the places it could have been copied to
6514 by the shift as possibly nonzero. */
6515 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6516 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
6517 }
6518 else if (code == LSHIFT || code == ASHIFT)
6519 inner <<= count;
6520 else
6521 inner = ((inner << (count % width)
6522 | (inner >> (width - (count % width)))) & mode_mask);
6523
6524 nonzero &= (outer | inner);
6525 }
6526 break;
6527
6528 case FFS:
6529 /* This is at most the number of bits in the mode. */
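 /* For example, in a 32-bit mode the result of FFS is in the range
    0 through 32, so floor_log2 (32) + 1 == 6 bits suffice and the
    mask below is 0x3f.  */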
6530 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6531 break;
6532
6533 case IF_THEN_ELSE:
6534 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
6535 | nonzero_bits (XEXP (x, 2), mode));
6536 break;
6537 }
6538
6539 return nonzero;
6540}
6541\f
6542/* Return the number of bits at the high-order end of X that are known to
6543 be equal to the sign bit. This number will always be between 1 and
6544 the number of bits in the mode of X. MODE is the mode to be used
6545 if X is VOIDmode. */
6546
6547static int
6548num_sign_bit_copies (x, mode)
6549 rtx x;
6550 enum machine_mode mode;
6551{
6552 enum rtx_code code = GET_CODE (x);
6553 int bitwidth;
6554 int num0, num1, result;
6555 unsigned HOST_WIDE_INT nonzero;
6556 rtx tem;
6557
6558 /* If we weren't given a mode, use the mode of X. If the mode is still
6559 VOIDmode, we don't know anything. */
6560
6561 if (mode == VOIDmode)
6562 mode = GET_MODE (x);
6563
6564 if (mode == VOIDmode)
6565 return 1;
6566
6567 bitwidth = GET_MODE_BITSIZE (mode);
6568
6569 switch (code)
6570 {
6571 case REG:
6572
6573 if (reg_last_set_value[REGNO (x)] != 0
6574 && reg_last_set_mode[REGNO (x)] == mode
6575 && (reg_n_sets[REGNO (x)] == 1
6576 || reg_last_set_label[REGNO (x)] == label_tick)
6577 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6578 return reg_last_set_sign_bit_copies[REGNO (x)];
6579
6580 tem = get_last_value (x);
6581 if (tem != 0)
6582 return num_sign_bit_copies (tem, mode);
6583
6584 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6585 return reg_sign_bit_copies[REGNO (x)];
6586 break;
6587
6588#ifdef BYTE_LOADS_SIGN_EXTEND
6589 case MEM:
6590 /* Some RISC machines sign-extend all loads of smaller than a word. */
6591 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6592#endif
6593
6594 case CONST_INT:
6595 /* If the constant is negative, take its 1's complement and remask.
6596 Then see how many zero bits we have. */
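 /* A worked case (illustrative): for X == -4 in a 32-bit mode,
    NONZERO becomes ~0xfffffffc & MASK == 3, and we return
    32 - floor_log2 (3) - 1 == 30, the number of leading one bits
    in 0b111...100.  */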
6597 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
6598 if (bitwidth <= HOST_BITS_PER_WIDE_INT
6599 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6600 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
6601
6602 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
6603
6604 case SUBREG:
6605 /* If this is a SUBREG for a promoted object that is sign-extended
6606 and we are looking at it in a wider mode, we know that at least the
6607 high-order bits are known to be sign bit copies. */
6608
6609 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6610 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
6611 num_sign_bit_copies (SUBREG_REG (x), mode));
6612
6613 /* For a smaller object, just ignore the high bits. */
6614 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6615 {
6616 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6617 return MAX (1, (num0
6618 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6619 - bitwidth)));
6620 }
6621
6622#ifdef BYTE_LOADS_EXTEND
6623 /* For paradoxical SUBREGs, just look inside since, on machines with
6624 one of these defined, we assume that operations are actually
6625 performed on the full register. Note that we are passing MODE
6626 to the recursive call, so the number of sign bit copies will
6627 remain relative to that mode, not the inner mode. */
6628
6629 if (GET_MODE_SIZE (GET_MODE (x))
6630 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6631 return num_sign_bit_copies (SUBREG_REG (x), mode);
6632#endif
6633
6634 break;
6635
6636 case SIGN_EXTRACT:
6637 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6638 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6639 break;
6640
6641 case SIGN_EXTEND:
6642 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6643 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6644
6645 case TRUNCATE:
6646 /* For a smaller object, just ignore the high bits. */
6647 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6648 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6649 - bitwidth)));
6650
6651 case NOT:
6652 return num_sign_bit_copies (XEXP (x, 0), mode);
6653
6654 case ROTATE: case ROTATERT:
6655 /* If we are rotating left by a number of bits less than the number
6656 of sign bit copies, we can just subtract that amount from the
6657 number. */
6658 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6659 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6660 {
6661 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6662 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6663 : bitwidth - INTVAL (XEXP (x, 1))));
6664 }
6665 break;
6666
6667 case NEG:
6668 /* In general, this subtracts one sign bit copy. But if the value
6669 is known to be positive, the number of sign bit copies is the
6670 same as that of the input. Finally, if the input has just one bit
6671 that might be nonzero, all the bits are copies of the sign bit. */
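 /* For instance, if X is known to be 0 or 1 (NONZERO == 1), then
    (neg X) is 0 or -1 and every bit is a copy of the sign bit, so
    BITWIDTH is returned below.  */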
6672 nonzero = nonzero_bits (XEXP (x, 0), mode);
6673 if (nonzero == 1)
6674 return bitwidth;
6675
6676 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6677 if (num0 > 1
6678 && bitwidth <= HOST_BITS_PER_WIDE_INT
6679 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
6680 num0--;
6681
6682 return num0;
6683
6684 case IOR: case AND: case XOR:
6685 case SMIN: case SMAX: case UMIN: case UMAX:
6686 /* Logical operations will preserve the number of sign-bit copies.
6687 MIN and MAX operations always return one of the operands. */
6688 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6689 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6690 return MIN (num0, num1);
6691
6692 case PLUS: case MINUS:
6693 /* For addition and subtraction, we can have a 1-bit carry. However,
6694 if we are subtracting 1 from a positive number, there will not
6695 be such a carry. Furthermore, if the positive number is known to
6696 be 0 or 1, we know the result is either -1 or 0. */
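 /* For example, if X is known to be 0 or 1, (plus X -1) is -1 or 0
    and all BITWIDTH bits are sign bit copies; if instead X is
    non-negative with NONZERO == 7, a 32-bit (plus X -1) still has
    32 - floor_log2 (7) - 1 == 29 copies.  */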
6697
6698 if (code == PLUS && XEXP (x, 1) == constm1_rtx
6699 && bitwidth <= HOST_BITS_PER_WIDE_INT)
6700 {
6701 nonzero = nonzero_bits (XEXP (x, 0), mode);
6702 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
6703 return (nonzero == 1 || nonzero == 0 ? bitwidth
6704 : bitwidth - floor_log2 (nonzero) - 1);
6705 }
6706
6707 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6708 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6709 return MAX (1, MIN (num0, num1) - 1);
6710
6711 case MULT:
6712 /* The number of bits of the product is the sum of the number of
6713 bits of both terms. However, unless one of the terms is known
6714 to be positive, we must allow for an additional bit since negating
6715 a negative number can remove one sign bit copy. */
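 /* Illustration: two 32-bit operands with 20 sign bit copies apiece
    use at most 12 significant bits each, so the product needs at
    most 24 bits and has 32 - 12 - 12 == 8 copies, less one more if
    both operands might be negative.  */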
6716
6717 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6718 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6719
6720 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6721 if (result > 0
6722 && bitwidth <= HOST_BITS_PER_WIDE_INT
6723 && ((nonzero_bits (XEXP (x, 0), mode)
6724 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6725 && ((nonzero_bits (XEXP (x, 1), mode)
6726 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
6727 result--;
6728
6729 return MAX (1, result);
6730
6731 case UDIV:
6732 /* The result must be <= the first operand. */
6733 return num_sign_bit_copies (XEXP (x, 0), mode);
6734
6735 case UMOD:
6736 /* The result must be <= the second operand. */
6737 return num_sign_bit_copies (XEXP (x, 1), mode);
6738
6739 case DIV:
6740 /* Similar to unsigned division, except that we have to worry about
6741 the case where the divisor is negative, in which case we have
6742 to add 1. */
6743 result = num_sign_bit_copies (XEXP (x, 0), mode);
6744 if (result > 1
6745 && bitwidth <= HOST_BITS_PER_WIDE_INT
6746 && (nonzero_bits (XEXP (x, 1), mode)
6747 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6748 result --;
6749
6750 return result;
6751
6752 case MOD:
6753 result = num_sign_bit_copies (XEXP (x, 1), mode);
6754 if (result > 1
6755 && bitwidth <= HOST_BITS_PER_WIDE_INT
6756 && (nonzero_bits (XEXP (x, 1), mode)
6757 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6758 result --;
6759
6760 return result;
6761
6762 case ASHIFTRT:
6763 /* Shifts by a constant add to the number of bits equal to the
6764 sign bit. */
6765 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6766 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6767 && INTVAL (XEXP (x, 1)) > 0)
6768 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6769
6770 return num0;
6771
6772 case ASHIFT:
6773 case LSHIFT:
6774 /* Left shifts destroy copies. */
6775 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6776 || INTVAL (XEXP (x, 1)) < 0
6777 || INTVAL (XEXP (x, 1)) >= bitwidth)
6778 return 1;
6779
6780 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6781 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6782
6783 case IF_THEN_ELSE:
6784 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6785 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6786 return MIN (num0, num1);
6787
6788#if STORE_FLAG_VALUE == -1
6789 case EQ: case NE: case GE: case GT: case LE: case LT:
6790 case GEU: case GTU: case LEU: case LTU:
6791 return bitwidth;
6792#endif
6793 }
6794
6795 /* If we haven't been able to figure it out by one of the above rules,
6796 see if some of the high-order bits are known to be zero. If so,
6797 count those bits and return one less than that amount. If we can't
6798 safely compute the mask for this mode, always return BITWIDTH. */
6799
6800 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6801 return 1;
6802
6803 nonzero = nonzero_bits (x, mode);
6804 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
6805 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
6806}
6807\f
6808/* Return the number of "extended" bits there are in X, when interpreted
6809 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
6810 unsigned quantities, this is the number of high-order zero bits.
6811 For signed quantities, this is the number of copies of the sign bit
6812 minus 1. In both cases, this function returns the number of "spare"
6813 bits. For example, if two quantities for which this function returns
6814 at least 1 are added, the addition is known not to overflow.
6815
6816 This function will always return 0 unless called during combine, which
6817 implies that it must be called from a define_split. */
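/* A worked case (illustrative): if X in a 32-bit mode is unsigned with
   nonzero_bits (x, mode) == 0xff, this returns 31 - floor_log2 (0xff)
   == 24 spare bits, so adding two such quantities cannot overflow.  */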
6818
6819int
6820extended_count (x, mode, unsignedp)
6821 rtx x;
6822 enum machine_mode mode;
6823 int unsignedp;
6824{
6825 if (nonzero_sign_valid == 0)
6826 return 0;
6827
6828 return (unsignedp
6829 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6830 && (GET_MODE_BITSIZE (mode) - 1
6831 - floor_log2 (nonzero_bits (x, mode))))
6832 : num_sign_bit_copies (x, mode) - 1);
6833}
6834\f
6835/* This function is called from `simplify_shift_const' to merge two
6836 outer operations. Specifically, we have already found that we need
6837 to perform operation *POP0 with constant *PCONST0 at the outermost
6838 position. We would now like to also perform OP1 with constant CONST1
6839 (with *POP0 being done last).
6840
6841 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
6842 the resulting operation. *PCOMP_P is set to 1 if we would need to
6843 complement the innermost operand, otherwise it is unchanged.
6844
6845 MODE is the mode in which the operation will be done. No bits outside
6846 the width of this mode matter. It is assumed that the width of this mode
6847 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
6848
6849 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
6850 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6851 result is simply *PCONST0.
6852
6853 If the resulting operation cannot be expressed as one operation, we
6854 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
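/* A small worked case (illustrative): merging *POP0 == XOR, *PCONST0 == C
   with OP1 == AND, CONST1 == C uses (a & b) ^ b == (~a) & b, so *POP0
   becomes AND and *PCOMP_P is set; with OP1 == IOR it uses
   (a | b) ^ b == a & ~b, so *POP0 becomes AND and *PCONST0 becomes ~C.  */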
6855
6856static int
6857merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6858 enum rtx_code *pop0;
6859 HOST_WIDE_INT *pconst0;
6860 enum rtx_code op1;
6861 HOST_WIDE_INT const1;
6862 enum machine_mode mode;
6863 int *pcomp_p;
6864{
6865 enum rtx_code op0 = *pop0;
6866 HOST_WIDE_INT const0 = *pconst0;
6867
6868 const0 &= GET_MODE_MASK (mode);
6869 const1 &= GET_MODE_MASK (mode);
6870
6871 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6872 if (op0 == AND)
6873 const1 &= const0;
6874
6875 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6876 if OP0 is SET. */
6877
6878 if (op1 == NIL || op0 == SET)
6879 return 1;
6880
6881 else if (op0 == NIL)
6882 op0 = op1, const0 = const1;
6883
6884 else if (op0 == op1)
6885 {
6886 switch (op0)
6887 {
6888 case AND:
6889 const0 &= const1;
6890 break;
6891 case IOR:
6892 const0 |= const1;
6893 break;
6894 case XOR:
6895 const0 ^= const1;
6896 break;
6897 case PLUS:
6898 const0 += const1;
6899 break;
6900 case NEG:
6901 op0 = NIL;
6902 break;
6903 }
6904 }
6905
6906 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6907 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6908 return 0;
6909
6910 /* If the two constants aren't the same, we can't do anything. The
6911 remaining six cases can all be done. */
6912 else if (const0 != const1)
6913 return 0;
6914
6915 else
6916 switch (op0)
6917 {
6918 case IOR:
6919 if (op1 == AND)
6920 /* (a & b) | b == b */
6921 op0 = SET;
6922 else /* op1 == XOR */
6923 /* (a ^ b) | b == a | b */
6924 ;
6925 break;
6926
6927 case XOR:
6928 if (op1 == AND)
6929 /* (a & b) ^ b == (~a) & b */
6930 op0 = AND, *pcomp_p = 1;
6931 else /* op1 == IOR */
6932 /* (a | b) ^ b == a & ~b */
6933 op0 = AND, *pconst0 = ~ const0;
6934 break;
6935
6936 case AND:
6937 if (op1 == IOR)
6938 /* (a | b) & b == b */
6939 op0 = SET;
6940 else /* op1 == XOR */
6941 /* ((a ^ b) & b) == (~a) & b */
6942 *pcomp_p = 1;
6943 break;
6944 }
6945
6946 /* Check for NO-OP cases. */
6947 const0 &= GET_MODE_MASK (mode);
6948 if (const0 == 0
6949 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6950 op0 = NIL;
6951 else if (const0 == 0 && op0 == AND)
6952 op0 = SET;
6953 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6954 op0 = NIL;
6955
6956 *pop0 = op0;
6957 *pconst0 = const0;
6958
6959 return 1;
6960}
6961\f
6962/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6963 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6964 that we started with.
6965
6966 The shift is normally computed in the widest mode we find in VAROP, as
6967 long as it isn't a different number of words than RESULT_MODE. Exceptions
6968 are ASHIFTRT and ROTATE, which are always done in their original mode. */
6969
6970static rtx
6971simplify_shift_const (x, code, result_mode, varop, count)
6972 rtx x;
6973 enum rtx_code code;
6974 enum machine_mode result_mode;
6975 rtx varop;
6976 int count;
6977{
6978 enum rtx_code orig_code = code;
6979 int orig_count = count;
6980 enum machine_mode mode = result_mode;
6981 enum machine_mode shift_mode, tmode;
6982 int mode_words
6983 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6984 /* We form (outer_op (code varop count) (outer_const)). */
6985 enum rtx_code outer_op = NIL;
6986 HOST_WIDE_INT outer_const;
6987 rtx const_rtx;
6988 int complement_p = 0;
6989 rtx new;
6990
6991 /* If we were given an invalid count, don't do anything except exactly
6992 what was requested. */
6993
6994 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6995 {
6996 if (x)
6997 return x;
6998
6999 return gen_rtx (code, mode, varop, GEN_INT (count));
7000 }
7001
7002 /* Unless one of the branches of the `if' in this loop does a `continue',
7003 we will `break' the loop after the `if'. */
7004
7005 while (count != 0)
7006 {
7007 /* If we have an operand of (clobber (const_int 0)), just return that
7008 value. */
7009 if (GET_CODE (varop) == CLOBBER)
7010 return varop;
7011
7012 /* If we discovered we had to complement VAROP, leave. Making a NOT
7013 here would cause an infinite loop. */
7014 if (complement_p)
7015 break;
7016
7017 /* Convert ROTATERT to ROTATE. */
7018 if (code == ROTATERT)
7019 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7020
7021 /* Canonicalize LSHIFT to ASHIFT. */
7022 if (code == LSHIFT)
7023 code = ASHIFT;
7024
7025 /* We need to determine what mode we will do the shift in. If the
7026 shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
7027 was originally done in. Otherwise, we can do it in MODE, the widest
7028 mode encountered. */
7029 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7030
7031 /* Handle cases where the count is greater than the size of the mode
7032 minus 1. For ASHIFT, use the size minus one as the count (this can
7033 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7034 take the count modulo the size. For other shifts, the result is
7035 zero.
7036
7037 Since these shifts are being produced by the compiler by combining
7038 multiple operations, each of which is defined, we know what the
7039 result is supposed to be. */
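 /* For example, in a 32-bit SHIFT_MODE an ASHIFTRT by 40 becomes an
    ASHIFTRT by 31, a ROTATE by 40 becomes a ROTATE by 8, and an
    LSHIFTRT by 40 yields zero.  */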
7040
7041 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7042 {
7043 if (code == ASHIFTRT)
7044 count = GET_MODE_BITSIZE (shift_mode) - 1;
7045 else if (code == ROTATE || code == ROTATERT)
7046 count %= GET_MODE_BITSIZE (shift_mode);
7047 else
7048 {
7049 /* We can't simply return zero because there may be an
7050 outer op. */
7051 varop = const0_rtx;
7052 count = 0;
7053 break;
7054 }
7055 }
7056
7057 /* Negative counts are invalid and should not have been made (a
7058 programmer-specified negative count should have been handled
7059 above). */
7060 else if (count < 0)
7061 abort ();
7062
7063 /* An arithmetic right shift of a quantity known to be -1 or 0
7064 is a no-op. */
7065 if (code == ASHIFTRT
7066 && (num_sign_bit_copies (varop, shift_mode)
7067 == GET_MODE_BITSIZE (shift_mode)))
7068 {
7069 count = 0;
7070 break;
7071 }
7072
7073 /* If we are doing an arithmetic right shift and discarding all but
7074 the sign bit copies, this is equivalent to doing a shift by the
7075 bitsize minus one. Convert it into that shift because it will often
7076 allow other simplifications. */
7077
7078 if (code == ASHIFTRT
7079 && (count + num_sign_bit_copies (varop, shift_mode)
7080 >= GET_MODE_BITSIZE (shift_mode)))
7081 count = GET_MODE_BITSIZE (shift_mode) - 1;
7082
7083 /* We simplify the tests below and elsewhere by converting
7084 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7085 `make_compound_operation' will convert it to a ASHIFTRT for
7086 those machines (such as Vax) that don't have a LSHIFTRT. */
7087 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
7088 && code == ASHIFTRT
7089 && ((nonzero_bits (varop, shift_mode)
7090 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7091 == 0))
7092 code = LSHIFTRT;
7093
7094 switch (GET_CODE (varop))
7095 {
7096 case SIGN_EXTEND:
7097 case ZERO_EXTEND:
7098 case SIGN_EXTRACT:
7099 case ZERO_EXTRACT:
7100 new = expand_compound_operation (varop);
7101 if (new != varop)
7102 {
7103 varop = new;
7104 continue;
7105 }
7106 break;
7107
7108 case MEM:
7109 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7110 minus the width of a smaller mode, we can do this with a
7111 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7112 if ((code == ASHIFTRT || code == LSHIFTRT)
7113 && ! mode_dependent_address_p (XEXP (varop, 0))
7114 && ! MEM_VOLATILE_P (varop)
7115 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7116 MODE_INT, 1)) != BLKmode)
7117 {
7118#if BYTES_BIG_ENDIAN
7119 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7120#else
7121 new = gen_rtx (MEM, tmode,
7122 plus_constant (XEXP (varop, 0),
7123 count / BITS_PER_UNIT));
7124 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7125 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7126 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7127#endif
7128 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7129 : ZERO_EXTEND, mode, new);
7130 count = 0;
7131 continue;
7132 }
7133 break;
7134
7135 case USE:
7136 /* Similar to the case above, except that we can only do this if
7137 the resulting mode is the same as that of the underlying
7138 MEM and adjust the address depending on the *bits* endianness
7139 because of the way that bit-field extract insns are defined. */
7140 if ((code == ASHIFTRT || code == LSHIFTRT)
7141 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7142 MODE_INT, 1)) != BLKmode
7143 && tmode == GET_MODE (XEXP (varop, 0)))
7144 {
7145#if BITS_BIG_ENDIAN
7146 new = XEXP (varop, 0);
7147#else
7148 new = copy_rtx (XEXP (varop, 0));
7149 SUBST (XEXP (new, 0),
7150 plus_constant (XEXP (new, 0),
7151 count / BITS_PER_UNIT));
7152#endif
7153
7154 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7155 : ZERO_EXTEND, mode, new);
7156 count = 0;
7157 continue;
7158 }
7159 break;
7160
7161 case SUBREG:
7162 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7163 the same number of words as what we've seen so far. Then store
7164 the widest mode in MODE. */
7165 if (subreg_lowpart_p (varop)
7166 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7167 > GET_MODE_SIZE (GET_MODE (varop)))
7168 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7169 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7170 == mode_words))
7171 {
7172 varop = SUBREG_REG (varop);
7173 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7174 mode = GET_MODE (varop);
7175 continue;
7176 }
7177 break;
7178
7179 case MULT:
7180 /* Some machines use MULT instead of ASHIFT because MULT
7181 is cheaper. But it is still better on those machines to
7182 merge two shifts into one. */
7183 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7184 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7185 {
7186 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
7187 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7188 continue;
7189 }
7190 break;
7191
7192 case UDIV:
7193 /* Similar, for when divides are cheaper. */
7194 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7195 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7196 {
7197 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
7198 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7199 continue;
7200 }
7201 break;
7202
7203 case ASHIFTRT:
7204 /* If we are extracting just the sign bit of an arithmetic right
7205 shift, that shift is not needed. */
7206 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7207 {
7208 varop = XEXP (varop, 0);
7209 continue;
7210 }
7211
7212 /* ... fall through ... */
7213
7214 case LSHIFTRT:
7215 case ASHIFT:
7216 case LSHIFT:
7217 case ROTATE:
7218 /* Here we have two nested shifts. The result is usually the
7219 AND of a new shift with a mask. We compute the result below. */
7220 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7221 && INTVAL (XEXP (varop, 1)) >= 0
7222 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7223 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7224 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7225 {
7226 enum rtx_code first_code = GET_CODE (varop);
7227 int first_count = INTVAL (XEXP (varop, 1));
7228 unsigned HOST_WIDE_INT mask;
7229 rtx mask_rtx;
7230 rtx inner;
7231
7232 if (first_code == LSHIFT)
7233 first_code = ASHIFT;
7234
7235 /* We have one common special case. We can't do any merging if
7236 the inner code is an ASHIFTRT of a smaller mode. However, if
7237 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7238 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7239 we can convert it to
7240 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7241 This simplifies certain SIGN_EXTEND operations. */
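 /* A concrete instance (illustrative): with M1 == SImode, M2 == HImode
    and C2 == 32 - 16 == 16,
    (ashift:SI (subreg:SI (ashiftrt:HI FOO C1) 0) 16)
    becomes
    (ashiftrt:SI (ashift:SI (and:SI (subreg:SI FOO 0) C3) 16) C1)
    where C3 has the low-order C1 bits clear.  */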
7242 if (code == ASHIFT && first_code == ASHIFTRT
7243 && (GET_MODE_BITSIZE (result_mode)
7244 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7245 {
7246 /* C3 has the low-order C1 bits zero. */
7247
7248 mask = (GET_MODE_MASK (mode)
7249 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7250
7251 varop = simplify_and_const_int (NULL_RTX, result_mode,
7252 XEXP (varop, 0), mask);
7253 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7254 varop, count);
7255 count = first_count;
7256 code = ASHIFTRT;
7257 continue;
7258 }
7259
7260 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7261 than C1 high-order bits equal to the sign bit, we can convert
7262 this to either an ASHIFT or a ASHIFTRT depending on the
7263 two counts.
7264
7265 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7266
7267 if (code == ASHIFTRT && first_code == ASHIFT
7268 && GET_MODE (varop) == shift_mode
7269 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7270 > first_count))
7271 {
7272 count -= first_count;
7273 if (count < 0)
7274 count = - count, code = ASHIFT;
7275 varop = XEXP (varop, 0);
7276 continue;
7277 }
7278
7279 /* There are some cases we can't do. If CODE is ASHIFTRT,
7280 we can only do this if FIRST_CODE is also ASHIFTRT.
7281
7282 We can't do the case when CODE is ROTATE and FIRST_CODE is
7283 ASHIFTRT.
7284
7285 If the mode of this shift is not the mode of the outer shift,
7286 we can't do this if either shift is ASHIFTRT or ROTATE.
7287
7288 Finally, we can't do any of these if the mode is too wide
7289 unless the codes are the same.
7290
7291 Handle the case where the shift codes are the same
7292 first. */
7293
7294 if (code == first_code)
7295 {
7296 if (GET_MODE (varop) != result_mode
7297 && (code == ASHIFTRT || code == ROTATE))
7298 break;
7299
7300 count += first_count;
7301 varop = XEXP (varop, 0);
7302 continue;
7303 }
7304
7305 if (code == ASHIFTRT
7306 || (code == ROTATE && first_code == ASHIFTRT)
7307 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7308 || (GET_MODE (varop) != result_mode
7309 && (first_code == ASHIFTRT || first_code == ROTATE
7310 || code == ROTATE)))
7311 break;
7312
7313 /* To compute the mask to apply after the shift, shift the
7314 nonzero bits of the inner shift the same way the
7315 outer shift will. */
7316
7317 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7318
7319 mask_rtx
7320 = simplify_binary_operation (code, result_mode, mask_rtx,
7321 GEN_INT (count));
7322
7323 /* Give up if we can't compute an outer operation to use. */
7324 if (mask_rtx == 0
7325 || GET_CODE (mask_rtx) != CONST_INT
7326 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7327 INTVAL (mask_rtx),
7328 result_mode, &complement_p))
7329 break;
7330
7331 /* If the shifts are in the same direction, we add the
7332 counts. Otherwise, we subtract them. */
7333 if ((code == ASHIFTRT || code == LSHIFTRT)
7334 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7335 count += first_count;
7336 else
7337 count -= first_count;
7338
7339 /* If COUNT is positive, the new shift is usually CODE,
7340 except for the two exceptions below, in which case it is
7341 FIRST_CODE. If the count is negative, FIRST_CODE should
7342 always be used. */
7343 if (count > 0
7344 && ((first_code == ROTATE && code == ASHIFT)
7345 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7346 code = first_code;
7347 else if (count < 0)
7348 code = first_code, count = - count;
7349
7350 varop = XEXP (varop, 0);
7351 continue;
7352 }
7353
7354 /* If we have (A << B << C) for any shift, we can convert this to
7355 (A << C << B). This wins if A is a constant. Only try this if
7356 B is not a constant. */
7357
7358 else if (GET_CODE (varop) == code
7359 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7360 && 0 != (new
7361 = simplify_binary_operation (code, mode,
7362 XEXP (varop, 0),
7363 GEN_INT (count))))
7364 {
7365 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7366 count = 0;
7367 continue;
7368 }
7369 break;
7370
7371 case NOT:
7372 /* Make this fit the case below. */
7373 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7374 GEN_INT (GET_MODE_MASK (mode)));
7375 continue;
7376
7377 case IOR:
7378 case AND:
7379 case XOR:
7380 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7381 with C the size of VAROP - 1 and the shift is logical if
7382 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7383 we have an (le X 0) operation. If we have an arithmetic shift
7384 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7385 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
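 /* As an illustration: with X in SImode and C == 31, the sign bit of
    (ior (plus X -1) X) is set exactly when X <= 0, so a logical
    shift right by 31 yields (le X 0) when STORE_FLAG_VALUE == 1.  */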
7386
7387 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7388 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7389 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7390 && (code == LSHIFTRT || code == ASHIFTRT)
7391 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7392 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7393 {
7394 count = 0;
7395 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7396 const0_rtx);
7397
7398 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7399 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7400
7401 continue;
7402 }
7403
7404 /* If we have (shift (logical)), move the logical to the outside
7405 to allow it to possibly combine with another logical and the
7406 shift to combine with another shift. This also canonicalizes to
7407 what a ZERO_EXTRACT looks like. Also, some machines have
7408 (and (shift)) insns. */
7409
7410 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7411 && (new = simplify_binary_operation (code, result_mode,
7412 XEXP (varop, 1),
7413 GEN_INT (count))) != 0
7414 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7415 INTVAL (new), result_mode, &complement_p))
7416 {
7417 varop = XEXP (varop, 0);
7418 continue;
7419 }
7420
7421 /* If we can't do that, try to simplify the shift in each arm of the
7422 logical expression, make a new logical expression, and apply
7423 the inverse distributive law. */
7424 {
7425 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
7426 XEXP (varop, 0), count);
7427 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
7428 XEXP (varop, 1), count);
7429
7430 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
7431 varop = apply_distributive_law (varop);
7432
7433 count = 0;
7434 }
7435 break;
7436
7437 case EQ:
7438 /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7439 says that the sign bit can be tested, FOO has mode MODE, C is
7440 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
7441 may be nonzero. */
7442 if (code == LSHIFT
7443 && XEXP (varop, 1) == const0_rtx
7444 && GET_MODE (XEXP (varop, 0)) == result_mode
7445 && count == GET_MODE_BITSIZE (result_mode) - 1
7446 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7447 && ((STORE_FLAG_VALUE
7448 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7449 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7450 && merge_outer_ops (&outer_op, &outer_const, XOR,
7451 (HOST_WIDE_INT) 1, result_mode,
7452 &complement_p))
7453 {
7454 varop = XEXP (varop, 0);
7455 count = 0;
7456 continue;
7457 }
7458 break;
7459
7460 case NEG:
7461 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7462 than the number of bits in the mode is equivalent to A. */
7463 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7464 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
7465 {
7466 varop = XEXP (varop, 0);
7467 count = 0;
7468 continue;
7469 }
7470
7471 /* NEG commutes with ASHIFT since it is multiplication. Move the
7472 NEG outside to allow shifts to combine. */
7473 if (code == ASHIFT
7474 && merge_outer_ops (&outer_op, &outer_const, NEG,
7475 (HOST_WIDE_INT) 0, result_mode,
7476 &complement_p))
7477 {
7478 varop = XEXP (varop, 0);
7479 continue;
7480 }
7481 break;
7482
7483 case PLUS:
7484 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7485 is one less than the number of bits in the mode is
7486 equivalent to (xor A 1). */
7487 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7488 && XEXP (varop, 1) == constm1_rtx
7489 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7490 && merge_outer_ops (&outer_op, &outer_const, XOR,
7491 (HOST_WIDE_INT) 1, result_mode,
7492 &complement_p))
7493 {
7494 count = 0;
7495 varop = XEXP (varop, 0);
7496 continue;
7497 }
7498
7499 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7500 that might be nonzero in BAR are those being shifted out and those
7501 bits are known zero in FOO, we can replace the PLUS with FOO.
7502 Similarly in the other operand order. This code occurs when
7503 we are computing the size of a variable-size array. */
7504
7505 if ((code == ASHIFTRT || code == LSHIFTRT)
7506 && count < HOST_BITS_PER_WIDE_INT
7507 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
7508 && (nonzero_bits (XEXP (varop, 1), result_mode)
7509 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
7510 {
7511 varop = XEXP (varop, 0);
7512 continue;
7513 }
7514 else if ((code == ASHIFTRT || code == LSHIFTRT)
7515 && count < HOST_BITS_PER_WIDE_INT
7516 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7517 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7518 >> count)
7519 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7520 & nonzero_bits (XEXP (varop, 1),
7521 result_mode)))
7522 {
7523 varop = XEXP (varop, 1);
7524 continue;
7525 }
7526
7527 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7528 if (code == ASHIFT
7529 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7530 && (new = simplify_binary_operation (ASHIFT, result_mode,
7531 XEXP (varop, 1),
7532 GEN_INT (count))) != 0
7533 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7534 INTVAL (new), result_mode, &complement_p))
7535 {
7536 varop = XEXP (varop, 0);
7537 continue;
7538 }
7539 break;
7540
7541 case MINUS:
7542 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7543 with C the size of VAROP - 1 and the shift is logical if
7544 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7545 we have a (gt X 0) operation. If the shift is arithmetic with
7546 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7547 we have a (neg (gt X 0)) operation. */
7548
7549 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7550 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7551 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7552 && (code == LSHIFTRT || code == ASHIFTRT)
7553 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7554 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7555 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7556 {
7557 count = 0;
7558 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7559 const0_rtx);
7560
7561 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7562 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7563
7564 continue;
7565 }
7566 break;
7567 }
7568
7569 break;
7570 }
7571
7572 /* We need to determine what mode to do the shift in. If the shift is
7573 a ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7574 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7575 The code we care about is that of the shift that will actually be done,
7576 not the shift that was originally requested. */
7577 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7578
7579 /* We have now finished analyzing the shift. The result should be
7580 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7581 OUTER_OP is non-NIL, it is an operation that needs to be applied
7582 to the result of the shift. OUTER_CONST is the relevant constant,
7583 but we must turn off all bits turned off in the shift.
7584
7585 If we were passed a value for X, see if we can use any pieces of
7586 it. If not, make new rtx. */
7587
7588 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7589 && GET_CODE (XEXP (x, 1)) == CONST_INT
7590 && INTVAL (XEXP (x, 1)) == count)
7591 const_rtx = XEXP (x, 1);
7592 else
7593 const_rtx = GEN_INT (count);
7594
7595 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7596 && GET_MODE (XEXP (x, 0)) == shift_mode
7597 && SUBREG_REG (XEXP (x, 0)) == varop)
7598 varop = XEXP (x, 0);
7599 else if (GET_MODE (varop) != shift_mode)
7600 varop = gen_lowpart_for_combine (shift_mode, varop);
7601
7602 /* If we can't make the SUBREG, try to return what we were given. */
7603 if (GET_CODE (varop) == CLOBBER)
7604 return x ? x : varop;
7605
7606 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7607 if (new != 0)
7608 x = new;
7609 else
7610 {
7611 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7612 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7613
7614 SUBST (XEXP (x, 0), varop);
7615 SUBST (XEXP (x, 1), const_rtx);
7616 }
7617
7618 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
7619 turn off all the bits that the shift would have turned off. */
7620 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7621 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7622 GET_MODE_MASK (result_mode) >> orig_count);
7623
7624 /* Do the remainder of the processing in RESULT_MODE. */
7625 x = gen_lowpart_for_combine (result_mode, x);
7626
7627 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7628 operation. */
7629 if (complement_p)
7630 x = gen_unary (NOT, result_mode, x);
7631
7632 if (outer_op != NIL)
7633 {
7634 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7635 outer_const &= GET_MODE_MASK (result_mode);
7636
7637 if (outer_op == AND)
7638 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7639 else if (outer_op == SET)
7640 /* This means that we have determined that the result is
7641 equivalent to a constant. This should be rare. */
7642 x = GEN_INT (outer_const);
7643 else if (GET_RTX_CLASS (outer_op) == '1')
7644 x = gen_unary (outer_op, result_mode, x);
7645 else
7646 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7647 }
7648
7649 return x;
7650}
7651\f
7652/* Like recog, but we receive the address of a pointer to a new pattern.
7653 We try to match the rtx that the pointer points to.
7654 If that fails, we may try to modify or replace the pattern,
7655 storing the replacement into the same pointer object.
7656
7657 Modifications include deletion or addition of CLOBBERs.
7658
7659 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7660 the CLOBBERs are placed.
7661
7662 The value is the final insn code from the pattern ultimately matched,
7663 or -1. */
7664
7665static int
7666recog_for_combine (pnewpat, insn, pnotes)
7667 rtx *pnewpat;
7668 rtx insn;
7669 rtx *pnotes;
7670{
7671 register rtx pat = *pnewpat;
7672 int insn_code_number;
7673 int num_clobbers_to_add = 0;
7674 int i;
7675 rtx notes = 0;
7676
7677 /* Is the result of combination a valid instruction? */
7678 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7679
7680 /* If it isn't, there is the possibility that we previously had an insn
7681 that clobbered some register as a side effect, but the combined
7682 insn doesn't need to do that. So try once more without the clobbers
7683 unless this represents an ASM insn. */
7684
7685 if (insn_code_number < 0 && ! check_asm_operands (pat)
7686 && GET_CODE (pat) == PARALLEL)
7687 {
7688 int pos;
7689
7690 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7691 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7692 {
7693 if (i != pos)
7694 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7695 pos++;
7696 }
7697
7698 SUBST_INT (XVECLEN (pat, 0), pos);
7699
7700 if (pos == 1)
7701 pat = XVECEXP (pat, 0, 0);
7702
7703 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7704 }
7705
7706 /* If we had any clobbers to add, make a new pattern that contains
7707 them. Then check to make sure that all of them are dead. */
7708 if (num_clobbers_to_add)
7709 {
7710 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7711 gen_rtvec (GET_CODE (pat) == PARALLEL
7712 ? XVECLEN (pat, 0) + num_clobbers_to_add
7713 : num_clobbers_to_add + 1));
7714
7715 if (GET_CODE (pat) == PARALLEL)
7716 for (i = 0; i < XVECLEN (pat, 0); i++)
7717 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7718 else
7719 XVECEXP (newpat, 0, 0) = pat;
7720
7721 add_clobbers (newpat, insn_code_number);
7722
7723 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7724 i < XVECLEN (newpat, 0); i++)
7725 {
7726 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7727 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7728 return -1;
7729 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7730 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7731 }
7732 pat = newpat;
7733 }
7734
7735 *pnewpat = pat;
7736 *pnotes = notes;
7737
7738 return insn_code_number;
7739}
7740\f
7741/* Like gen_lowpart but for use by combine. In combine it is not possible
7742 to create any new pseudoregs. However, it is safe to create
7743 invalid memory addresses, because combine will try to recognize
7744 them and all they will do is make the combine attempt fail.
7745
7746 If for some reason this cannot do its job, an rtx
7747 (clobber (const_int 0)) is returned.
7748 An insn containing that will not be recognized. */
7749
7750#undef gen_lowpart
7751
7752static rtx
7753gen_lowpart_for_combine (mode, x)
7754 enum machine_mode mode;
7755 register rtx x;
7756{
7757 rtx result;
7758
7759 if (GET_MODE (x) == mode)
7760 return x;
7761
7762 /* We can only support MODE being wider than a word if X is a
7763 constant integer or has a mode the same size. */
7764
7765 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
7766 && ! ((GET_MODE (x) == VOIDmode
7767 && (GET_CODE (x) == CONST_INT
7768 || GET_CODE (x) == CONST_DOUBLE))
7769 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
7770 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7771
7772 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
7773 won't know what to do. So we will strip off the SUBREG here and
7774 process normally. */
7775 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
7776 {
7777 x = SUBREG_REG (x);
7778 if (GET_MODE (x) == mode)
7779 return x;
7780 }
7781
7782 result = gen_lowpart_common (mode, x);
7783 if (result)
7784 return result;
7785
7786 if (GET_CODE (x) == MEM)
7787 {
7788 register int offset = 0;
7789 rtx new;
7790
7791 /* Refuse to work on a volatile memory ref or one with a mode-dependent
7792 address. */
7793 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
7794 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7795
7796 /* If we want to refer to something bigger than the original memref,
7797 generate a perverse subreg instead. That will force a reload
7798 of the original memref X. */
7799 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
7800 return gen_rtx (SUBREG, mode, x, 0);
7801
7802#if WORDS_BIG_ENDIAN
7803 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
7804 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
7805#endif
7806#if BYTES_BIG_ENDIAN
7807 /* Adjust the address so that the address-after-the-data
7808 is unchanged. */
7809 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
7810 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
7811#endif
7812 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
7813 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
7814 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
7815 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
7816 return new;
7817 }
7818
7819 /* If X is a comparison operator, rewrite it in a new mode. This
7820 probably won't match, but may allow further simplifications. */
7821 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
7822 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
7823
7824 /* If we couldn't simplify X any other way, just enclose it in a
7825 SUBREG. Normally, this SUBREG won't match, but some patterns may
7826 include an explicit SUBREG or we may simplify it further in combine. */
7827 else
7828 {
7829 int word = 0;
7830
7831 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
7832 word = ((GET_MODE_SIZE (GET_MODE (x))
7833 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
7834 / UNITS_PER_WORD);
7835 return gen_rtx (SUBREG, mode, x, word);
7836 }
7837}
7838\f
7839/* Make an rtx expression. This is a subset of gen_rtx and only supports
7840 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
7841
7842 If the identical expression was previously in the insn (in the undobuf),
7843 it will be returned. Only if it is not found will a new expression
7844 be made. */
7845
7846/*VARARGS2*/
7847static rtx
7848gen_rtx_combine (va_alist)
7849 va_dcl
7850{
7851 va_list p;
7852 enum rtx_code code;
7853 enum machine_mode mode;
7854 int n_args;
7855 rtx args[3];
7856 int i, j;
7857 char *fmt;
7858 rtx rt;
7859
7860 va_start (p);
7861 code = va_arg (p, enum rtx_code);
7862 mode = va_arg (p, enum machine_mode);
7863 n_args = GET_RTX_LENGTH (code);
7864 fmt = GET_RTX_FORMAT (code);
7865
7866 if (n_args == 0 || n_args > 3)
7867 abort ();
7868
7869 /* Get each arg and verify that it is supposed to be an expression. */
7870 for (j = 0; j < n_args; j++)
7871 {
7872 if (*fmt++ != 'e')
7873 abort ();
7874
7875 args[j] = va_arg (p, rtx);
7876 }
7877
7878 /* See if this is in undobuf. Be sure we don't use objects that came
7879 from another insn; this could produce circular rtl structures. */
7880
7881 for (i = previous_num_undos; i < undobuf.num_undo; i++)
7882 if (!undobuf.undo[i].is_int
7883 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
7884 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
7885 {
7886 for (j = 0; j < n_args; j++)
7887 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
7888 break;
7889
7890 if (j == n_args)
7891 return undobuf.undo[i].old_contents.rtx;
7892 }
7893
7894 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7895 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7896 rt = rtx_alloc (code);
7897 PUT_MODE (rt, mode);
7898 XEXP (rt, 0) = args[0];
7899 if (n_args > 1)
7900 {
7901 XEXP (rt, 1) = args[1];
7902 if (n_args > 2)
7903 XEXP (rt, 2) = args[2];
7904 }
7905 return rt;
7906}
7907
7908/* These routines make binary and unary operations by first seeing if they
7909 fold; if not, a new expression is allocated. */
7910
7911static rtx
7912gen_binary (code, mode, op0, op1)
7913 enum rtx_code code;
7914 enum machine_mode mode;
7915 rtx op0, op1;
7916{
7917 rtx result;
7918 rtx tem;
7919
7920 if (GET_RTX_CLASS (code) == 'c'
7921 && (GET_CODE (op0) == CONST_INT
7922 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
7923 tem = op0, op0 = op1, op1 = tem;
7924
7925 if (GET_RTX_CLASS (code) == '<')
7926 {
7927 enum machine_mode op_mode = GET_MODE (op0);
7928 if (op_mode == VOIDmode)
7929 op_mode = GET_MODE (op1);
7930 result = simplify_relational_operation (code, op_mode, op0, op1);
7931 }
7932 else
7933 result = simplify_binary_operation (code, mode, op0, op1);
7934
7935 if (result)
7936 return result;
7937
7938 /* Put complex operands first and constants second. */
7939 if (GET_RTX_CLASS (code) == 'c'
7940 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7941 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7942 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7943 || (GET_CODE (op0) == SUBREG
7944 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7945 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7946 return gen_rtx_combine (code, mode, op1, op0);
7947
7948 return gen_rtx_combine (code, mode, op0, op1);
7949}
7950
7951static rtx
7952gen_unary (code, mode, op0)
7953 enum rtx_code code;
7954 enum machine_mode mode;
7955 rtx op0;
7956{
7957 rtx result = simplify_unary_operation (code, mode, op0, mode);
7958
7959 if (result)
7960 return result;
7961
7962 return gen_rtx_combine (code, mode, op0);
7963}
7964\f
7965/* Simplify a comparison between *POP0 and *POP1 where CODE is the
7966 comparison code that will be tested.
7967
7968 The result is a possibly different comparison code to use. *POP0 and
7969 *POP1 may be updated.
7970
7971 It is possible that we might detect that a comparison is either always
7972 true or always false. However, we do not perform general constant
7973 folding in combine, so this knowledge isn't useful. Such tautologies
7974 should have been detected earlier. Hence we ignore all such cases. */
7975
7976static enum rtx_code
7977simplify_comparison (code, pop0, pop1)
7978 enum rtx_code code;
7979 rtx *pop0;
7980 rtx *pop1;
7981{
7982 rtx op0 = *pop0;
7983 rtx op1 = *pop1;
7984 rtx tem, tem1;
7985 int i;
7986 enum machine_mode mode, tmode;
7987
7988 /* Try a few ways of applying the same transformation to both operands. */
7989 while (1)
7990 {
7991 /* If both operands are the same constant shift, see if we can ignore the
7992 shift. We can if the shift is a rotate or if the bits shifted out of
7993 this shift are known to be zero for both inputs and if the type of
7994 comparison is compatible with the shift. */
7995 if (GET_CODE (op0) == GET_CODE (op1)
7996 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7997 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7998 || ((GET_CODE (op0) == LSHIFTRT
7999 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
8000 && (code != GT && code != LT && code != GE && code != LE))
8001 || (GET_CODE (op0) == ASHIFTRT
8002 && (code != GTU && code != LTU
8003 && code != GEU && code != LEU)))
8004 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8005 && INTVAL (XEXP (op0, 1)) >= 0
8006 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8007 && XEXP (op0, 1) == XEXP (op1, 1))
8008 {
8009 enum machine_mode mode = GET_MODE (op0);
8010 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8011 int shift_count = INTVAL (XEXP (op0, 1));
8012
8013 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8014 mask &= (mask >> shift_count) << shift_count;
8015 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
8016 mask = (mask & (mask << shift_count)) >> shift_count;
8017
8018 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8019 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
8020 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8021 else
8022 break;
8023 }
8024
8025 /* If both operands are AND's of a paradoxical SUBREG by constant, the
8026 SUBREGs are of the same mode, and, in both cases, the AND would
8027 be redundant if the comparison was done in the narrower mode,
8028 do the comparison in the narrower mode (e.g., we are AND'ing with 1
8029 and the operand's possibly nonzero bits are 0xffffff01; in that case
8030 if we only care about QImode, we don't need the AND). This case
8031 occurs if the output mode of an scc insn is not SImode and
8032 STORE_FLAG_VALUE == 1 (e.g., the 386). */
8033
8034 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8035 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8036 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8037 && GET_CODE (XEXP (op0, 0)) == SUBREG
8038 && GET_CODE (XEXP (op1, 0)) == SUBREG
8039 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
8040 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
8041 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
8042 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
8043 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
8044 <= HOST_BITS_PER_WIDE_INT)
8045 && (nonzero_bits (SUBREG_REG (XEXP (op0, 0)),
8046 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
8047 & ~ INTVAL (XEXP (op0, 1))) == 0
8048 && (nonzero_bits (SUBREG_REG (XEXP (op1, 0)),
8049 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
8050 & ~ INTVAL (XEXP (op1, 1))) == 0)
8051 {
8052 op0 = SUBREG_REG (XEXP (op0, 0));
8053 op1 = SUBREG_REG (XEXP (op1, 0));
8054
8055 /* The resulting comparison is always unsigned since we masked off
8056 the original sign bit. */
8057 code = unsigned_condition (code);
8058 }
8059 else
8060 break;
8061 }
8062
8063 /* If the first operand is a constant, swap the operands and adjust the
8064 comparison code appropriately. */
8065 if (CONSTANT_P (op0))
8066 {
8067 tem = op0, op0 = op1, op1 = tem;
8068 code = swap_condition (code);
8069 }
8070
8071 /* We now enter a loop during which we will try to simplify the comparison.
8072 For the most part, we are only concerned with comparisons with zero,
8073 but some things may really be comparisons with zero that do not start
8074 out looking that way. */
8075
8076 while (GET_CODE (op1) == CONST_INT)
8077 {
8078 enum machine_mode mode = GET_MODE (op0);
8079 int mode_width = GET_MODE_BITSIZE (mode);
8080 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8081 int equality_comparison_p;
8082 int sign_bit_comparison_p;
8083 int unsigned_comparison_p;
8084 HOST_WIDE_INT const_op;
8085
8086 /* We only want to handle integral modes. This catches VOIDmode,
8087 CCmode, and the floating-point modes. An exception is that we
8088 can handle VOIDmode if OP0 is a COMPARE or a comparison
8089 operation. */
8090
8091 if (GET_MODE_CLASS (mode) != MODE_INT
8092 && ! (mode == VOIDmode
8093 && (GET_CODE (op0) == COMPARE
8094 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8095 break;
8096
8097 /* Get the constant we are comparing against and turn off all bits
8098 not on in our mode. */
8099 const_op = INTVAL (op1);
8100 if (mode_width <= HOST_BITS_PER_WIDE_INT)
8101 const_op &= mask;
8102
8103 /* If we are comparing against a constant power of two and the value
8104 being compared can only have that single bit nonzero (e.g., it was
8105 `and'ed with that bit), we can replace this with a comparison
8106 with zero. */
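 /* E.g. (illustrative): if OP0 is (and X (const_int 8)), whose only
 possibly nonzero bit is bit 3, then (eq OP0 (const_int 8)) becomes
 (ne OP0 (const_int 0)). */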
8107 if (const_op
8108 && (code == EQ || code == NE || code == GE || code == GEU
8109 || code == LT || code == LTU)
8110 && mode_width <= HOST_BITS_PER_WIDE_INT
8111 && exact_log2 (const_op) >= 0
8112 && nonzero_bits (op0, mode) == const_op)
8113 {
8114 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8115 op1 = const0_rtx, const_op = 0;
8116 }
8117
8118 /* Similarly, if we are comparing a value known to be either -1 or
8119 0 with -1, change it to the opposite comparison against zero. */
8120
8121 if (const_op == -1
8122 && (code == EQ || code == NE || code == GT || code == LE
8123 || code == GEU || code == LTU)
8124 && num_sign_bit_copies (op0, mode) == mode_width)
8125 {
8126 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8127 op1 = const0_rtx, const_op = 0;
8128 }
8129
8130 /* Do some canonicalizations based on the comparison code. We prefer
8131 comparisons against zero and then prefer equality comparisons.
8132 If we can reduce the size of a constant, we will do that too. */
8133
8134 switch (code)
8135 {
8136 case LT:
8137 /* < C is equivalent to <= (C - 1) */
8138 if (const_op > 0)
8139 {
8140 const_op -= 1;
8141 op1 = GEN_INT (const_op);
8142 code = LE;
8143 /* ... fall through to LE case below. */
8144 }
8145 else
8146 break;
8147
8148 case LE:
8149 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
8150 if (const_op < 0)
8151 {
8152 const_op += 1;
8153 op1 = GEN_INT (const_op);
8154 code = LT;
8155 }
8156
8157 /* If we are doing a <= 0 comparison on a value known to have
8158 a zero sign bit, we can replace this with == 0. */
8159 else if (const_op == 0
8160 && mode_width <= HOST_BITS_PER_WIDE_INT
8161 && (nonzero_bits (op0, mode)
8162 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8163 code = EQ;
8164 break;
8165
8166 case GE:
8167 /* >= C is equivalent to > (C - 1). */
8168 if (const_op > 0)
8169 {
8170 const_op -= 1;
8171 op1 = GEN_INT (const_op);
8172 code = GT;
8173 /* ... fall through to GT below. */
8174 }
8175 else
8176 break;
8177
8178 case GT:
8179 /* > C is equivalent to >= (C + 1); we do this for C < 0 */
8180 if (const_op < 0)
8181 {
8182 const_op += 1;
8183 op1 = GEN_INT (const_op);
8184 code = GE;
8185 }
8186
8187 /* If we are doing a > 0 comparison on a value known to have
8188 a zero sign bit, we can replace this with != 0. */
8189 else if (const_op == 0
8190 && mode_width <= HOST_BITS_PER_WIDE_INT
8191 && (nonzero_bits (op0, mode)
8192 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8193 code = NE;
8194 break;
8195
8196 case LTU:
8197 /* unsigned < C is equivalent to <= (C - 1). */
8198 if (const_op > 0)
8199 {
8200 const_op -= 1;
8201 op1 = GEN_INT (const_op);
8202 code = LEU;
8203 /* ... fall through ... */
8204 }
8205
8206 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8207 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8208 {
8209 const_op = 0, op1 = const0_rtx;
8210 code = GE;
8211 break;
8212 }
8213 else
8214 break;
8215
8216 case LEU:
8217 /* unsigned <= 0 is equivalent to == 0 */
8218 if (const_op == 0)
8219 code = EQ;
8220
8221 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8222 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8223 {
8224 const_op = 0, op1 = const0_rtx;
8225 code = GE;
8226 }
8227 break;
8228
8229 case GEU:
8230 /* unsigned >= C is equivalent to > (C - 1). */
8231 if (const_op > 1)
8232 {
8233 const_op -= 1;
8234 op1 = GEN_INT (const_op);
8235 code = GTU;
8236 /* ... fall through ... */
8237 }
8238
8239 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8240 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8241 {
8242 const_op = 0, op1 = const0_rtx;
8243 code = LT;
8244 }
8245 else
8246 break;
8247
8248 case GTU:
8249 /* unsigned > 0 is equivalent to != 0 */
8250 if (const_op == 0)
8251 code = NE;
8252
8253 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8254 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8255 {
8256 const_op = 0, op1 = const0_rtx;
8257 code = LT;
8258 }
8259 break;
8260 }
8261
8262 /* Compute some predicates to simplify code below. */
8263
8264 equality_comparison_p = (code == EQ || code == NE);
8265 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8266 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8267 || code == GEU);
8268
8269 /* Now try cases based on the opcode of OP0. If none of the cases
8270 does a "continue", we exit this loop immediately after the
8271 switch. */
8272
8273 switch (GET_CODE (op0))
8274 {
8275 case ZERO_EXTRACT:
8276 /* If we are extracting a single bit from a variable position in
8277 a constant that has only a single bit set and are comparing it
8278 with zero, we can convert this into an equality comparison
8279 between the position and the location of the single bit. We can't
8280 do this if bits are big-endian and we don't have an extzv since we then
8281 can't know what mode to use for the endianness adjustment. */
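 /* E.g. (illustrative, little-endian bit numbering): comparing
 (zero_extract (const_int 4) (const_int 1) POS) with zero using EQ
 becomes (ne POS (const_int 2)), since only bit 2 of 4 is set. */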
8282
8283#if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8284 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8285 && XEXP (op0, 1) == const1_rtx
8286 && equality_comparison_p && const_op == 0
8287 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8288 {
8289#if BITS_BIG_ENDIAN
8290 i = (GET_MODE_BITSIZE
8291 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8292#endif
8293
8294 op0 = XEXP (op0, 2);
8295 op1 = GEN_INT (i);
8296 const_op = i;
8297
8298 /* Result is nonzero iff shift count is equal to I. */
8299 code = reverse_condition (code);
8300 continue;
8301 }
8302#endif
8303
8304 /* ... fall through ... */
8305
8306 case SIGN_EXTRACT:
8307 tem = expand_compound_operation (op0);
8308 if (tem != op0)
8309 {
8310 op0 = tem;
8311 continue;
8312 }
8313 break;
8314
8315 case NOT:
8316 /* If testing for equality, we can take the NOT of the constant. */
8317 if (equality_comparison_p
8318 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8319 {
8320 op0 = XEXP (op0, 0);
8321 op1 = tem;
8322 continue;
8323 }
8324
8325 /* If just looking at the sign bit, reverse the sense of the
8326 comparison. */
8327 if (sign_bit_comparison_p)
8328 {
8329 op0 = XEXP (op0, 0);
8330 code = (code == GE ? LT : GE);
8331 continue;
8332 }
8333 break;
8334
8335 case NEG:
8336 /* If testing for equality, we can take the NEG of the constant. */
8337 if (equality_comparison_p
8338 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8339 {
8340 op0 = XEXP (op0, 0);
8341 op1 = tem;
8342 continue;
8343 }
8344
8345 /* The remaining cases only apply to comparisons with zero. */
8346 if (const_op != 0)
8347 break;
8348
8349 /* When X is ABS or is known positive,
8350 (neg X) is < 0 if and only if X != 0. */
8351
8352 if (sign_bit_comparison_p
8353 && (GET_CODE (XEXP (op0, 0)) == ABS
8354 || (mode_width <= HOST_BITS_PER_WIDE_INT
8355 && (nonzero_bits (XEXP (op0, 0), mode)
8356 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
8357 {
8358 op0 = XEXP (op0, 0);
8359 code = (code == LT ? NE : EQ);
8360 continue;
8361 }
8362
8363 /* If we have NEG of something whose two high-order bits are the
8364 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8365 if (num_sign_bit_copies (op0, mode) >= 2)
8366 {
8367 op0 = XEXP (op0, 0);
8368 code = swap_condition (code);
8369 continue;
8370 }
8371 break;
8372
8373 case ROTATE:
8374 /* If we are testing equality and our count is a constant, we
8375 can perform the inverse operation on our RHS. */
8376 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8377 && (tem = simplify_binary_operation (ROTATERT, mode,
8378 op1, XEXP (op0, 1))) != 0)
8379 {
8380 op0 = XEXP (op0, 0);
8381 op1 = tem;
8382 continue;
8383 }
8384
8385 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8386 a particular bit. Convert it to an AND of a constant of that
8387 bit. This will be converted into a ZERO_EXTRACT. */
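 /* E.g. (illustrative, SImode): (lt (rotate X (const_int 1))
 (const_int 0)) tests bit 30 of X, so it becomes
 (ne (and X (const_int 0x40000000)) (const_int 0)). */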
8388 if (const_op == 0 && sign_bit_comparison_p
8389 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8390 && mode_width <= HOST_BITS_PER_WIDE_INT)
8391 {
8392 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8393 ((HOST_WIDE_INT) 1
8394 << (mode_width - 1
8395 - INTVAL (XEXP (op0, 1)))));
8396 code = (code == LT ? NE : EQ);
8397 continue;
8398 }
8399
8400 /* ... fall through ... */
8401
8402 case ABS:
8403 /* ABS is ignorable inside an equality comparison with zero. */
8404 if (const_op == 0 && equality_comparison_p)
8405 {
8406 op0 = XEXP (op0, 0);
8407 continue;
8408 }
8409 break;
8410
8411
8412 case SIGN_EXTEND:
8413 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8414 to (compare FOO CONST) if CONST fits in FOO's mode and we
8415 are either testing inequality or have an unsigned comparison
8416 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8417 if (! unsigned_comparison_p
8418 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8419 <= HOST_BITS_PER_WIDE_INT)
8420 && ((unsigned HOST_WIDE_INT) const_op
8421 < (((HOST_WIDE_INT) 1
8422 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
8423 {
8424 op0 = XEXP (op0, 0);
8425 continue;
8426 }
8427 break;
8428
8429 case SUBREG:
8430 /* Check for the case where we are comparing A - C1 with C2,
8431 both constants are smaller than 1/2 the maximum positive
8432 value in MODE, and the comparison is equality or unsigned.
8433 In that case, if A is either zero-extended to MODE or has
8434 sufficient sign bits so that the high-order bit in MODE
8435 is a copy of the sign in the inner mode, we can prove that it is
8436 safe to do the operation in the wider mode. This simplifies
8437 many range checks. */
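 /* E.g. (illustrative): with X known to be zero-extended into SImode,
 the QImode range check (unsigned char) (X - 1) < 10 can be done as
 an SImode comparison of (plus X (const_int -1)) against 10. */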
8438
8439 if (mode_width <= HOST_BITS_PER_WIDE_INT
8440 && subreg_lowpart_p (op0)
8441 && GET_CODE (SUBREG_REG (op0)) == PLUS
8442 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8443 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8444 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8445 < GET_MODE_MASK (mode) / 2)
8446 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
8447 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
8448 GET_MODE (SUBREG_REG (op0)))
8449 & ~ GET_MODE_MASK (mode))
8450 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8451 GET_MODE (SUBREG_REG (op0)))
8452 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8453 - GET_MODE_BITSIZE (mode)))))
8454 {
8455 op0 = SUBREG_REG (op0);
8456 continue;
8457 }
8458
8459 /* If the inner mode is narrower and we are extracting the low part,
8460 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8461 if (subreg_lowpart_p (op0)
8462 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8463 /* Fall through */ ;
8464 else
8465 break;
8466
8467 /* ... fall through ... */
8468
8469 case ZERO_EXTEND:
8470 if ((unsigned_comparison_p || equality_comparison_p)
8471 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8472 <= HOST_BITS_PER_WIDE_INT)
8473 && ((unsigned HOST_WIDE_INT) const_op
8474 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8475 {
8476 op0 = XEXP (op0, 0);
8477 continue;
8478 }
8479 break;
8480
8481 case PLUS:
8482 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
8483 this for equality comparisons due to pathological cases involving
8484 overflows. */
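 /* E.g. (illustrative): (eq (plus X (const_int 3)) (const_int 7))
 becomes (eq X (const_int 4)). */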
8485 if (equality_comparison_p
8486 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8487 op1, XEXP (op0, 1))))
8488 {
8489 op0 = XEXP (op0, 0);
8490 op1 = tem;
8491 continue;
8492 }
8493
8494 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
8495 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8496 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8497 {
8498 op0 = XEXP (XEXP (op0, 0), 0);
8499 code = (code == LT ? EQ : NE);
8500 continue;
8501 }
8502 break;
8503
8504 case MINUS:
8505 /* (eq (minus A B) C) -> (eq A (plus B C)) or
8506 (eq B (minus A C)), whichever simplifies. We can only do
8507 this for equality comparisons due to pathological cases involving
8508 overflows. */
8509 if (equality_comparison_p
8510 && 0 != (tem = simplify_binary_operation (PLUS, mode,
8511 XEXP (op0, 1), op1)))
8512 {
8513 op0 = XEXP (op0, 0);
8514 op1 = tem;
8515 continue;
8516 }
8517
8518 if (equality_comparison_p
8519 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8520 XEXP (op0, 0), op1)))
8521 {
8522 op0 = XEXP (op0, 1);
8523 op1 = tem;
8524 continue;
8525 }
8526
8527 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8528 of bits in X minus 1, is one iff X > 0. */
8529 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8530 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8531 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8532 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8533 {
8534 op0 = XEXP (op0, 1);
8535 code = (code == GE ? LE : GT);
8536 continue;
8537 }
8538 break;
8539
8540 case XOR:
8541 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8542 if C is zero or B is a constant. */
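 /* E.g. (illustrative): (eq (xor X (const_int 5)) (const_int 3))
 becomes (eq X (const_int 6)). */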
8543 if (equality_comparison_p
8544 && 0 != (tem = simplify_binary_operation (XOR, mode,
8545 XEXP (op0, 1), op1)))
8546 {
8547 op0 = XEXP (op0, 0);
8548 op1 = tem;
8549 continue;
8550 }
8551 break;
8552
8553 case EQ: case NE:
8554 case LT: case LTU: case LE: case LEU:
8555 case GT: case GTU: case GE: case GEU:
8556 /* We can't do anything if OP0 is a condition code value, rather
8557 than an actual data value. */
8558 if (const_op != 0
8559#ifdef HAVE_cc0
8560 || XEXP (op0, 0) == cc0_rtx
8561#endif
8562 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8563 break;
8564
8565 /* Get the two operands being compared. */
8566 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8567 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8568 else
8569 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8570
8571 /* Check for the cases where we simply want the result of the
8572 earlier test or the opposite of that result. */
8573 if (code == NE
8574 || (code == EQ && reversible_comparison_p (op0))
8575 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8576 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8577 && (STORE_FLAG_VALUE
8578 & (((HOST_WIDE_INT) 1
8579 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
8580 && (code == LT
8581 || (code == GE && reversible_comparison_p (op0)))))
8582 {
8583 code = (code == LT || code == NE
8584 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8585 op0 = tem, op1 = tem1;
8586 continue;
8587 }
8588 break;
8589
8590 case IOR:
8591 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8592 iff X <= 0. */
8593 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8594 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8595 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8596 {
8597 op0 = XEXP (op0, 1);
8598 code = (code == GE ? GT : LE);
8599 continue;
8600 }
8601 break;
8602
8603 case AND:
8604 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8605 will be converted to a ZERO_EXTRACT later. */
8606 if (const_op == 0 && equality_comparison_p
8607 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8608 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8609 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8610 {
8611 op0 = simplify_and_const_int
8612 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8613 XEXP (op0, 1),
8614 XEXP (XEXP (op0, 0), 1)),
8615 (HOST_WIDE_INT) 1);
8616 continue;
8617 }
8618
8619 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8620 zero and X is a comparison and C1 and C2 describe only bits set
8621 in STORE_FLAG_VALUE, we can compare with X. */
8622 if (const_op == 0 && equality_comparison_p
8623 && mode_width <= HOST_BITS_PER_WIDE_INT
8624 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8625 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8626 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8627 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
8628 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8629 {
8630 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8631 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8632 if ((~ STORE_FLAG_VALUE & mask) == 0
8633 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8634 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8635 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8636 {
8637 op0 = XEXP (XEXP (op0, 0), 0);
8638 continue;
8639 }
8640 }
8641
8642 /* If we are doing an equality comparison of an AND of a bit equal
8643 to the sign bit, replace this with a LT or GE comparison of
8644 the underlying value. */
8645 if (equality_comparison_p
8646 && const_op == 0
8647 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8648 && mode_width <= HOST_BITS_PER_WIDE_INT
8649 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8650 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
8651 {
8652 op0 = XEXP (op0, 0);
8653 code = (code == EQ ? GE : LT);
8654 continue;
8655 }
8656
8657 /* If this AND operation is really a ZERO_EXTEND from a narrower
8658 mode, the constant fits within that mode, and this is either an
8659 equality or unsigned comparison, try to do this comparison in
8660 the narrower mode. */
8661 if ((equality_comparison_p || unsigned_comparison_p)
8662 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8663 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8664 & GET_MODE_MASK (mode))
8665 + 1)) >= 0
8666 && const_op >> i == 0
8667 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8668 {
8669 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8670 continue;
8671 }
8672 break;
8673
8674 case ASHIFT:
8675 case LSHIFT:
8676 /* If we have (compare (xshift FOO N) (const_int C)) and
8677 the high order N bits of FOO (N+1 if an inequality comparison)
8678 are known to be zero, we can do this by comparing FOO with C
8679 shifted right N bits so long as the low-order N bits of C are
8680 zero. */
8681 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8682 && INTVAL (XEXP (op0, 1)) >= 0
8683 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
8684 < HOST_BITS_PER_WIDE_INT)
8685 && ((const_op
8686 & ((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1) == 0)
8687 && mode_width <= HOST_BITS_PER_WIDE_INT
8688 && (nonzero_bits (XEXP (op0, 0), mode)
8689 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8690 + ! equality_comparison_p))) == 0)
8691 {
8692 const_op >>= INTVAL (XEXP (op0, 1));
8693 op1 = GEN_INT (const_op);
8694 op0 = XEXP (op0, 0);
8695 continue;
8696 }
8697
8698 /* If we are doing a sign bit comparison, it means we are testing
8699 a particular bit. Convert it to the appropriate AND. */
8700 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8701 && mode_width <= HOST_BITS_PER_WIDE_INT)
8702 {
8703 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8704 ((HOST_WIDE_INT) 1
8705 << (mode_width - 1
8706 - INTVAL (XEXP (op0, 1)))));
8707 code = (code == LT ? NE : EQ);
8708 continue;
8709 }
8710
8711 /* If this is an equality comparison with zero and we are shifting
8712 the low bit to the sign bit, we can convert this to an AND of the
8713 low-order bit. */
8714 if (const_op == 0 && equality_comparison_p
8715 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8716 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8717 {
8718 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8719 (HOST_WIDE_INT) 1);
8720 continue;
8721 }
8722 break;
8723
8724 case ASHIFTRT:
8725 /* If this is an equality comparison with zero, we can do this
8726 as a logical shift, which might be much simpler. */
8727 if (equality_comparison_p && const_op == 0
8728 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8729 {
8730 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8731 XEXP (op0, 0),
8732 INTVAL (XEXP (op0, 1)));
8733 continue;
8734 }
8735
8736 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8737 do the comparison in a narrower mode. */
8738 if (! unsigned_comparison_p
8739 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8740 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8741 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8742 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
8743 MODE_INT, 1)) != BLKmode
8744 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8745 || ((unsigned HOST_WIDE_INT) - const_op
8746 <= GET_MODE_MASK (tmode))))
8747 {
8748 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8749 continue;
8750 }
8751
8752 /* ... fall through ... */
8753 case LSHIFTRT:
8754 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
8755 the low order N bits of FOO are known to be zero, we can do this
8756 by comparing FOO with C shifted left N bits so long as no
8757 overflow occurs. */
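 /* E.g. (illustrative): (eq (lshiftrt FOO (const_int 2))
 (const_int 3)) becomes (eq FOO (const_int 12)) when the low two
 bits of FOO are known to be zero. */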
8758 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8759 && INTVAL (XEXP (op0, 1)) >= 0
8760 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8761 && mode_width <= HOST_BITS_PER_WIDE_INT
8762 && (nonzero_bits (XEXP (op0, 0), mode)
8763 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
8764 && (const_op == 0
8765 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
8766 < mode_width)))
8767 {
8768 const_op <<= INTVAL (XEXP (op0, 1));
8769 op1 = GEN_INT (const_op);
8770 op0 = XEXP (op0, 0);
8771 continue;
8772 }
8773
8774 /* If we are using this shift to extract just the sign bit, we
8775 can replace this with an LT or GE comparison. */
8776 if (const_op == 0
8777 && (equality_comparison_p || sign_bit_comparison_p)
8778 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8779 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8780 {
8781 op0 = XEXP (op0, 0);
8782 code = (code == NE || code == GT ? LT : GE);
8783 continue;
8784 }
8785 break;
8786 }
8787
8788 break;
8789 }
8790
8791 /* Now make any compound operations involved in this comparison. Then,
8792 check for an outermost SUBREG on OP0 that isn't doing anything or is
8793 paradoxical. The latter case can only occur when it is known that the
8794 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
8795 We can never remove a SUBREG for a non-equality comparison because the
8796 sign bit is in a different place in the underlying object. */
8797
8798 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
8799 op1 = make_compound_operation (op1, SET);
8800
8801 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8802 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8803 && (code == NE || code == EQ)
8804 && ((GET_MODE_SIZE (GET_MODE (op0))
8805 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
8806 {
8807 op0 = SUBREG_REG (op0);
8808 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
8809 }
8810
8811 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8812 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8813 && (code == NE || code == EQ)
8814 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8815 <= HOST_BITS_PER_WIDE_INT)
8816 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
8817 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
8818 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
8819 op1),
8820 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
8821 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
8822 op0 = SUBREG_REG (op0), op1 = tem;
8823
8824 /* We now do the opposite procedure: Some machines don't have compare
8825 insns in all modes. If OP0's mode is an integer mode smaller than a
8826 word and we can't do a compare in that mode, see if there is a larger
8827 mode for which we can do the compare. There are a number of cases in
8828 which we can use the wider mode. */
8829
8830 mode = GET_MODE (op0);
8831 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
8832 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
8833 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
8834 for (tmode = GET_MODE_WIDER_MODE (mode);
8835 (tmode != VOIDmode
8836 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
8837 tmode = GET_MODE_WIDER_MODE (tmode))
8838 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
8839 {
8840 /* If the only nonzero bits in OP0 and OP1 are those in the
8841 narrower mode and this is an equality or unsigned comparison,
8842 we can use the wider mode. Similarly for sign-extended
8843 values and equality or signed comparisons. */
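 /* E.g. (illustrative): an EQ test of two HImode values can be done
 in SImode when the bits outside HImode are known to be zero in
 both operands. */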
8844 if (((code == EQ || code == NE
8845 || code == GEU || code == GTU || code == LEU || code == LTU)
8846 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
8847 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
8848 || ((code == EQ || code == NE
8849 || code == GE || code == GT || code == LE || code == LT)
8850 && (num_sign_bit_copies (op0, tmode)
8851 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
8852 && (num_sign_bit_copies (op1, tmode)
8853 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
8854 {
8855 op0 = gen_lowpart_for_combine (tmode, op0);
8856 op1 = gen_lowpart_for_combine (tmode, op1);
8857 break;
8858 }
8859
8860 /* If this is a test for negative, we can make an explicit
8861 test of the sign bit. */
8862
8863 if (op1 == const0_rtx && (code == LT || code == GE)
8864 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8865 {
8866 op0 = gen_binary (AND, tmode,
8867 gen_lowpart_for_combine (tmode, op0),
8868 GEN_INT ((HOST_WIDE_INT) 1
8869 << (GET_MODE_BITSIZE (mode) - 1)));
8870 code = (code == LT) ? NE : EQ;
8871 break;
8872 }
8873 }
8874
8875 *pop0 = op0;
8876 *pop1 = op1;
8877
8878 return code;
8879}
8880\f
8881/* Return 1 if we know that X, a comparison operation, is not operating
8882 on a floating-point value or is EQ or NE, meaning that we can safely
8883 reverse it. */
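/* (Background, illustrative: under IEEE floating point, (gt X Y) is
 not the inverse of (le X Y) when either operand is a NaN, since both
 are false on unordered operands; only EQ and NE reverse safely.) */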
8884
8885static int
8886reversible_comparison_p (x)
8887 rtx x;
8888{
8889 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
8890 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
8891 return 1;
8892
8893 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
8894 {
8895 case MODE_INT:
8896 return 1;
8897
8898 case MODE_CC:
8899 x = get_last_value (XEXP (x, 0));
8900 return (x && GET_CODE (x) == COMPARE
8901 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8902 }
8903
8904 return 0;
8905}
8906\f
8907/* Utility function for following routine. Called when X is part of a value
8908 being stored into reg_last_set_value. Sets reg_last_set_table_tick
8909 for each register mentioned. Similar to mention_regs in cse.c */
8910
8911static void
8912update_table_tick (x)
8913 rtx x;
8914{
8915 register enum rtx_code code = GET_CODE (x);
8916 register char *fmt = GET_RTX_FORMAT (code);
8917 register int i;
8918
8919 if (code == REG)
8920 {
8921 int regno = REGNO (x);
8922 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8923 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8924
8925 for (i = regno; i < endregno; i++)
8926 reg_last_set_table_tick[i] = label_tick;
8927
8928 return;
8929 }
8930
8931 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8932 /* Note that we can't have an "E" in values stored; see
8933 get_last_value_validate. */
8934 if (fmt[i] == 'e')
8935 update_table_tick (XEXP (x, i));
8936}
8937
8938/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8939 are saying that the register is clobbered and we no longer know its
8940 value. If INSN is zero, don't update reg_last_set; this is only permitted
8941 with VALUE also zero and is used to invalidate the register. */
8942
8943static void
8944record_value_for_reg (reg, insn, value)
8945 rtx reg;
8946 rtx insn;
8947 rtx value;
8948{
8949 int regno = REGNO (reg);
8950 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8951 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8952 int i;
8953
8954 /* If VALUE contains REG and we have a previous value for REG, substitute
8955 the previous value. */
8956 if (value && insn && reg_overlap_mentioned_p (reg, value))
8957 {
8958 rtx tem;
8959
8960 /* Set things up so get_last_value is allowed to see anything set up to
8961 our insn. */
8962 subst_low_cuid = INSN_CUID (insn);
8963 tem = get_last_value (reg);
8964
8965 if (tem)
8966 value = replace_rtx (copy_rtx (value), reg, tem);
8967 }
8968
8969 /* For each register modified, show we don't know its value, that
8970 its value has been updated, and that we don't know the location of
8971 the death of the register. */
8972 for (i = regno; i < endregno; i++)
8973 {
8974 if (insn)
8975 reg_last_set[i] = insn;
8976 reg_last_set_value[i] = 0;
8977 reg_last_death[i] = 0;
8978 }
8979
8980 /* Mark registers that are being referenced in this value. */
8981 if (value)
8982 update_table_tick (value);
8983
8984 /* Now update the status of each register being set.
8985 If someone is using this register in this block, set this register
8986 to invalid since we will get confused between the two lives in this
8987 basic block. This makes using this register always invalid. In cse, we
8988 scan the table to invalidate all entries using this register, but this
8989 is too much work for us. */
8990
8991 for (i = regno; i < endregno; i++)
8992 {
8993 reg_last_set_label[i] = label_tick;
8994 if (value && reg_last_set_table_tick[i] == label_tick)
8995 reg_last_set_invalid[i] = 1;
8996 else
8997 reg_last_set_invalid[i] = 0;
8998 }
8999
9000 /* The value being assigned might refer to X (like in "x++;"). In that
9001 case, we must replace it with (clobber (const_int 0)) to prevent
9002 infinite loops. */
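 /* E.g. (illustrative): for (set (reg X) (plus (reg X) (const_int 1))),
 the recorded value mentions X itself, so the reference to X is
 replaced with (clobber (const_int 0)). */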
9003 if (value && ! get_last_value_validate (&value,
9004 reg_last_set_label[regno], 0))
9005 {
9006 value = copy_rtx (value);
9007 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9008 value = 0;
9009 }
9010
9011 /* For the main register being modified, update the value, the mode, the
9012 nonzero bits, and the number of sign bit copies. */
9013
9014 reg_last_set_value[regno] = value;
9015
9016 if (value)
9017 {
9018 subst_low_cuid = INSN_CUID (insn);
9019 reg_last_set_mode[regno] = GET_MODE (reg);
9020 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9021 reg_last_set_sign_bit_copies[regno]
9022 = num_sign_bit_copies (value, GET_MODE (reg));
9023 }
9024}
9025
9026/* Used for communication between the following two routines. */
9027static rtx record_dead_insn;
9028
9029/* Called via note_stores from record_dead_and_set_regs to handle one
9030 SET or CLOBBER in an insn. */
9031
9032static void
9033record_dead_and_set_regs_1 (dest, setter)
9034 rtx dest, setter;
9035{
9036 if (GET_CODE (dest) == REG)
9037 {
9038 /* If we are setting the whole register, we know its value. Otherwise
9039 show that we don't know the value. We can handle SUBREG in
9040 some cases. */
9041 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9042 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9043 else if (GET_CODE (setter) == SET
9044 && GET_CODE (SET_DEST (setter)) == SUBREG
9045 && SUBREG_REG (SET_DEST (setter)) == dest
9046 && subreg_lowpart_p (SET_DEST (setter)))
9047 record_value_for_reg (dest, record_dead_insn,
9048 gen_lowpart_for_combine (GET_MODE (dest),
9049 SET_SRC (setter)));
9050 else
9051 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
9052 }
9053 else if (GET_CODE (dest) == MEM
9054 /* Ignore pushes, they clobber nothing. */
9055 && ! push_operand (dest, GET_MODE (dest)))
9056 mem_last_set = INSN_CUID (record_dead_insn);
9057}
9058
9059/* Update the records of when each REG was most recently set or killed
9060 for the things done by INSN. This is the last thing done in processing
9061 INSN in the combiner loop.
9062
9063 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
9064 similar information mem_last_set (which insn most recently modified memory)
9065 and last_call_cuid (which insn was the most recent subroutine call). */
9066
9067static void
9068record_dead_and_set_regs (insn)
9069 rtx insn;
9070{
9071 register rtx link;
9072 int i;
9073
9074 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9075 {
9076 if (REG_NOTE_KIND (link) == REG_DEAD
9077 && GET_CODE (XEXP (link, 0)) == REG)
9078 {
9079 int regno = REGNO (XEXP (link, 0));
9080 int endregno
9081 = regno + (regno < FIRST_PSEUDO_REGISTER
9082 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9083 : 1);
9084
9085 for (i = regno; i < endregno; i++)
9086 reg_last_death[i] = insn;
9087 }
9088 else if (REG_NOTE_KIND (link) == REG_INC)
9089 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
9090 }
9091
9092 if (GET_CODE (insn) == CALL_INSN)
9093 {
9094 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9095 if (call_used_regs[i])
9096 {
9097 reg_last_set_value[i] = 0;
9098 reg_last_death[i] = 0;
9099 }
9100
9101 last_call_cuid = mem_last_set = INSN_CUID (insn);
9102 }
9103
9104 record_dead_insn = insn;
9105 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9106}
9107\f
9108/* Utility routine for the following function. Verify that all the registers
9109 mentioned in *LOC are valid when *LOC was part of a value set when
9110 label_tick == TICK. Return 0 if some are not.
9111
9112 If REPLACE is non-zero, replace the invalid reference with
9113 (clobber (const_int 0)) and return 1. This replacement is useful because
9114 we often can get useful information about the form of a value (e.g., if
9115 it was produced by a shift that always produces -1 or 0) even though
9116 we don't know exactly what registers it was produced from. */
9117
9118static int
9119get_last_value_validate (loc, tick, replace)
9120 rtx *loc;
9121 int tick;
9122 int replace;
9123{
9124 rtx x = *loc;
9125 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9126 int len = GET_RTX_LENGTH (GET_CODE (x));
9127 int i;
9128
9129 if (GET_CODE (x) == REG)
9130 {
9131 int regno = REGNO (x);
9132 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9133 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9134 int j;
9135
9136 for (j = regno; j < endregno; j++)
9137 if (reg_last_set_invalid[j]
9138 /* If this is a pseudo-register that was only set once, it is
9139 always valid. */
9140 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9141 && reg_last_set_label[j] > tick))
9142 {
9143 if (replace)
9144 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9145 return replace;
9146 }
9147
9148 return 1;
9149 }
9150
9151 for (i = 0; i < len; i++)
9152 if ((fmt[i] == 'e'
9153 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9154 /* Don't bother with these. They shouldn't occur anyway. */
9155 || fmt[i] == 'E')
9156 return 0;
9157
9158 /* If we haven't found a reason for it to be invalid, it is valid. */
9159 return 1;
9160}
9161
9162/* Get the last value assigned to X, if known. Some registers
9163 in the value may be replaced with (clobber (const_int 0)) if their value
9164 is no longer known reliably. */
9165
9166static rtx
9167get_last_value (x)
9168 rtx x;
9169{
9170 int regno;
9171 rtx value;
9172
9173 /* If this is a non-paradoxical SUBREG, get the value of its operand and
9174 then convert it to the desired mode. If this is a paradoxical SUBREG,
9175 we cannot predict what values the "extra" bits might have. */
9176 if (GET_CODE (x) == SUBREG
9177 && subreg_lowpart_p (x)
9178 && (GET_MODE_SIZE (GET_MODE (x))
9179 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
9180 && (value = get_last_value (SUBREG_REG (x))) != 0)
9181 return gen_lowpart_for_combine (GET_MODE (x), value);
9182
9183 if (GET_CODE (x) != REG)
9184 return 0;
9185
9186 regno = REGNO (x);
9187 value = reg_last_set_value[regno];
9188
9189 /* If we don't have a value or if it isn't for this basic block, return 0. */
9190
9191 if (value == 0
9192 || (reg_n_sets[regno] != 1
9193 && reg_last_set_label[regno] != label_tick))
9194 return 0;
9195
9196 /* If the value was set in a later insn than the ones we are processing,
9197 we can't use it even if the register was only set once, but make a quick
9198 check to see if the previous insn set it to something. This is commonly
9199 the case when the same pseudo is used by repeated insns. */
9200
9201 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
9202 {
9203 rtx insn, set;
9204
9205 for (insn = prev_nonnote_insn (subst_insn);
9206 insn && INSN_CUID (insn) >= subst_low_cuid;
9207 insn = prev_nonnote_insn (insn))
9208 ;
9209
9210 if (insn
9211 && (set = single_set (insn)) != 0
9212 && rtx_equal_p (SET_DEST (set), x))
9213 {
9214 value = SET_SRC (set);
9215
9216 /* Make sure that VALUE doesn't reference X. Replace any
9217 explicit references with a CLOBBER. If there are any remaining
9218 references (rare), don't use the value. */
9219
9220 if (reg_mentioned_p (x, value))
9221 value = replace_rtx (copy_rtx (value), x,
9222 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9223
9224 if (reg_overlap_mentioned_p (x, value))
9225 return 0;
9226 }
9227 else
9228 return 0;
9229 }
9230
9231 /* If the value has all its registers valid, return it. */
9232 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9233 return value;
9234
9235 /* Otherwise, make a copy and replace any invalid register with
9236 (clobber (const_int 0)). If that fails for some reason, return 0. */
9237
9238 value = copy_rtx (value);
9239 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9240 return value;
9241
9242 return 0;
9243}
9244\f
9245/* Return nonzero if expression X refers to a REG or to memory
9246 that is set in an instruction more recent than FROM_CUID. */
9247
9248static int
9249use_crosses_set_p (x, from_cuid)
9250 register rtx x;
9251 int from_cuid;
9252{
9253 register char *fmt;
9254 register int i;
9255 register enum rtx_code code = GET_CODE (x);
9256
9257 if (code == REG)
9258 {
9259 register int regno = REGNO (x);
9260#ifdef PUSH_ROUNDING
9261 /* Don't allow uses of the stack pointer to be moved,
9262 because we don't know whether the move crosses a push insn. */
9263 if (regno == STACK_POINTER_REGNUM)
9264 return 1;
9265#endif
9266 return (reg_last_set[regno]
9267 && INSN_CUID (reg_last_set[regno]) > from_cuid);
9268 }
9269
9270 if (code == MEM && mem_last_set > from_cuid)
9271 return 1;
9272
9273 fmt = GET_RTX_FORMAT (code);
9274
9275 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9276 {
9277 if (fmt[i] == 'E')
9278 {
9279 register int j;
9280 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9281 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9282 return 1;
9283 }
9284 else if (fmt[i] == 'e'
9285 && use_crosses_set_p (XEXP (x, i), from_cuid))
9286 return 1;
9287 }
9288 return 0;
9289}
9290\f
9291/* Define three variables used for communication between the following
9292 routines. */
9293
9294static int reg_dead_regno, reg_dead_endregno;
9295static int reg_dead_flag;
9296
9297/* Function called via note_stores from reg_dead_at_p.
9298
9299 If DEST is within [reg_dead_regno, reg_dead_endregno), set
9300 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
9301
9302static void
9303reg_dead_at_p_1 (dest, x)
9304 rtx dest;
9305 rtx x;
9306{
9307 int regno, endregno;
9308
9309 if (GET_CODE (dest) != REG)
9310 return;
9311
9312 regno = REGNO (dest);
9313 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9314 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9315
9316 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9317 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9318}
9319
9320/* Return non-zero if REG is known to be dead at INSN.
9321
9322 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9323 referencing REG, it is dead. If we hit a SET referencing REG, it is
9324 live. Otherwise, see if it is live or dead at the start of the basic
9325 block we are in. */
9326
9327static int
9328reg_dead_at_p (reg, insn)
9329 rtx reg;
9330 rtx insn;
9331{
9332 int block, i;
9333
9334 /* Set variables for reg_dead_at_p_1. */
9335 reg_dead_regno = REGNO (reg);
9336 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9337 ? HARD_REGNO_NREGS (reg_dead_regno,
9338 GET_MODE (reg))
9339 : 1);
9340
9341 reg_dead_flag = 0;
9342
9343 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9344 beginning of function. */
9345 for (; insn && GET_CODE (insn) != CODE_LABEL;
9346 insn = prev_nonnote_insn (insn))
9347 {
9348 note_stores (PATTERN (insn), reg_dead_at_p_1);
9349 if (reg_dead_flag)
9350 return reg_dead_flag == 1 ? 1 : 0;
9351
9352 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
9353 return 1;
9354 }
9355
9356 /* Get the basic block number that we were in. */
9357 if (insn == 0)
9358 block = 0;
9359 else
9360 {
9361 for (block = 0; block < n_basic_blocks; block++)
9362 if (insn == basic_block_head[block])
9363 break;
9364
9365 if (block == n_basic_blocks)
9366 return 0;
9367 }
9368
9369 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
9370 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
9371 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
9372 return 0;
9373
9374 return 1;
9375}
9376\f
9377/* Remove register number REGNO from the dead registers list of INSN.
9378
9379 Return the note used to record the death, if there was one. */
9380
9381rtx
9382remove_death (regno, insn)
9383 int regno;
9384 rtx insn;
9385{
9386 register rtx note = find_regno_note (insn, REG_DEAD, regno);
9387
9388 if (note)
9389 {
9390 reg_n_deaths[regno]--;
9391 remove_note (insn, note);
9392 }
9393
9394 return note;
9395}
9396
9397/* For each register (hardware or pseudo) used within expression X, if its
9398 death is in an instruction with cuid between FROM_CUID (inclusive) and
9399 TO_INSN (exclusive), put a REG_DEAD note for that register in the
9400 list headed by PNOTES.
9401
9402 This is done when X is being merged by combination into TO_INSN. These
9403 notes will then be distributed as needed. */
9404
9405static void
9406move_deaths (x, from_cuid, to_insn, pnotes)
9407 rtx x;
9408 int from_cuid;
9409 rtx to_insn;
9410 rtx *pnotes;
9411{
9412 register char *fmt;
9413 register int len, i;
9414 register enum rtx_code code = GET_CODE (x);
9415
9416 if (code == REG)
9417 {
9418 register int regno = REGNO (x);
9419 register rtx where_dead = reg_last_death[regno];
9420
9421 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9422 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9423 {
9424 rtx note = remove_death (regno, where_dead);
9425
9426 /* It is possible for the call above to return 0. This can occur
9427 when reg_last_death points to I2 or I1 that we combined with.
9428 In that case make a new note.
9429
9430 We must also check for the case where X is a hard register
9431 and NOTE is a death note for a range of hard registers
9432 including X. In that case, we must put REG_DEAD notes for
9433 the remaining registers in place of NOTE. */
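 /* E.g. (illustrative, on a machine where DImode needs two hard regs):
 if X is (reg:SI 0) and NOTE covers (reg:DI 0), a REG_DEAD note for
 the word-sized piece (reg 1) is put back on WHERE_DEAD. */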
9434
9435 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
9436 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
9437 != GET_MODE_SIZE (GET_MODE (x))))
9438 {
9439 int deadregno = REGNO (XEXP (note, 0));
9440 int deadend
9441 = (deadregno + HARD_REGNO_NREGS (deadregno,
9442 GET_MODE (XEXP (note, 0))));
9443 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9444 int i;
9445
9446 for (i = deadregno; i < deadend; i++)
9447 if (i < regno || i >= ourend)
9448 REG_NOTES (where_dead)
9449 = gen_rtx (EXPR_LIST, REG_DEAD,
9450 gen_rtx (REG, word_mode, i),
9451 REG_NOTES (where_dead));
9452 }
9453
9454 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
9455 {
9456 XEXP (note, 1) = *pnotes;
9457 *pnotes = note;
9458 }
9459 else
9460 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
9461
9462 reg_n_deaths[regno]++;
9463 }
9464
9465 return;
9466 }
9467
9468 else if (GET_CODE (x) == SET)
9469 {
9470 rtx dest = SET_DEST (x);
9471
9472 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9473
9474 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9475 that accesses one word of a multi-word item, some
9476 piece of every register in the expression is used by
9477 this insn, so remove any old death. */
9478
9479 if (GET_CODE (dest) == ZERO_EXTRACT
9480 || GET_CODE (dest) == STRICT_LOW_PART
9481 || (GET_CODE (dest) == SUBREG
9482 && (((GET_MODE_SIZE (GET_MODE (dest))
9483 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9484 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9485 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
9486 {
9487 move_deaths (dest, from_cuid, to_insn, pnotes);
9488 return;
9489 }
9490
9491 /* If this is some other SUBREG, we know it replaces the entire
9492 value, so use that as the destination. */
9493 if (GET_CODE (dest) == SUBREG)
9494 dest = SUBREG_REG (dest);
9495
9496 /* If this is a MEM, adjust deaths of anything used in the address.
9497 For a REG (the only other possibility), the entire value is
9498 being replaced so the old value is not used in this insn. */
9499
9500 if (GET_CODE (dest) == MEM)
9501 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9502 return;
9503 }
9504
9505 else if (GET_CODE (x) == CLOBBER)
9506 return;
9507
9508 len = GET_RTX_LENGTH (code);
9509 fmt = GET_RTX_FORMAT (code);
9510
9511 for (i = 0; i < len; i++)
9512 {
9513 if (fmt[i] == 'E')
9514 {
9515 register int j;
9516 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9517 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9518 }
9519 else if (fmt[i] == 'e')
9520 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9521 }
9522}
9523\f
9524/* Return 1 if X is the target of a bit-field assignment in BODY, the
9525 pattern of an insn. X must be a REG. */
9526
9527static int
9528reg_bitfield_target_p (x, body)
9529 rtx x;
9530 rtx body;
9531{
9532 int i;
9533
9534 if (GET_CODE (body) == SET)
9535 {
9536 rtx dest = SET_DEST (body);
9537 rtx target;
9538 int regno, tregno, endregno, endtregno;
9539
9540 if (GET_CODE (dest) == ZERO_EXTRACT)
9541 target = XEXP (dest, 0);
9542 else if (GET_CODE (dest) == STRICT_LOW_PART)
9543 target = SUBREG_REG (XEXP (dest, 0));
9544 else
9545 return 0;
9546
9547 if (GET_CODE (target) == SUBREG)
9548 target = SUBREG_REG (target);
9549
9550 if (GET_CODE (target) != REG)
9551 return 0;
9552
9553 tregno = REGNO (target), regno = REGNO (x);
9554 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9555 return target == x;
9556
9557 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9558 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9559
9560 return endregno > tregno && regno < endtregno;
9561 }
9562
9563 else if (GET_CODE (body) == PARALLEL)
9564 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
9565 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
9566 return 1;
9567
9568 return 0;
9569}
9570\f
9571/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9572 as appropriate. I3 and I2 are the insns resulting from the combination
9573 insns including FROM (I2 may be zero).
9574
9575 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9576 not need REG_DEAD notes because they are being substituted for. This
9577 saves searching in the most common cases.
9578
9579 Each note in the list is either ignored or placed on some insns, depending
9580 on the type of note. */
9581
9582static void
9583distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9584 rtx notes;
9585 rtx from_insn;
9586 rtx i3, i2;
9587 rtx elim_i2, elim_i1;
9588{
9589 rtx note, next_note;
9590 rtx tem;
9591
9592 for (note = notes; note; note = next_note)
9593 {
9594 rtx place = 0, place2 = 0;
9595
9596 /* If this NOTE references a pseudo register, ensure it references
9597 the latest copy of that register. */
9598 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9599 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9600 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9601
9602 next_note = XEXP (note, 1);
9603 switch (REG_NOTE_KIND (note))
9604 {
9605 case REG_UNUSED:
9606 /* If this register is set or clobbered in I3, put the note there
9607 unless there is one already. */
9608 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9609 {
9610 if (! (GET_CODE (XEXP (note, 0)) == REG
9611 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9612 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9613 place = i3;
9614 }
9615 /* Otherwise, if this register is used by I3, then this register
9616 now dies here, so we must put a REG_DEAD note here unless there
9617 is one already. */
9618 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9619 && ! (GET_CODE (XEXP (note, 0)) == REG
9620 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9621 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9622 {
9623 PUT_REG_NOTE_KIND (note, REG_DEAD);
9624 place = i3;
9625 }
9626 break;
9627
9628 case REG_EQUAL:
9629 case REG_EQUIV:
9630 case REG_NONNEG:
9631 /* These notes say something about results of an insn. We can
9632 only support them if they used to be on I3 in which case they
9633 remain on I3. Otherwise they are ignored.
9634
9635 If the note refers to an expression that is not a constant, we
9636 must also ignore the note since we cannot tell whether the
9637 equivalence is still true. It might be possible to do
9638 slightly better than this (we only have a problem if I2DEST
9639 or I1DEST is present in the expression), but it doesn't
9640 seem worth the trouble. */
9641
9642 if (from_insn == i3
9643 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
9644 place = i3;
9645 break;
9646
9647 case REG_INC:
9648 case REG_NO_CONFLICT:
9649 case REG_LABEL:
9650 /* These notes say something about how a register is used. They must
9651 be present on any use of the register in I2 or I3. */
9652 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9653 place = i3;
9654
9655 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9656 {
9657 if (place)
9658 place2 = i2;
9659 else
9660 place = i2;
9661 }
9662 break;
9663
9664 case REG_WAS_0:
9665 /* It is too much trouble to try to see if this note is still
9666 correct in all situations. It is better to simply delete it. */
9667 break;
9668
9669 case REG_RETVAL:
9670 /* If the insn previously containing this note still exists,
9671 put it back where it was. Otherwise move it to the previous
9672 insn. Adjust the corresponding REG_LIBCALL note. */
9673 if (GET_CODE (from_insn) != NOTE)
9674 place = from_insn;
9675 else
9676 {
9677 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
9678 place = prev_real_insn (from_insn);
9679 if (tem && place)
9680 XEXP (tem, 0) = place;
9681 }
9682 break;
9683
9684 case REG_LIBCALL:
9685 /* This is handled similarly to REG_RETVAL. */
9686 if (GET_CODE (from_insn) != NOTE)
9687 place = from_insn;
9688 else
9689 {
9690 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
9691 place = next_real_insn (from_insn);
9692 if (tem && place)
9693 XEXP (tem, 0) = place;
9694 }
9695 break;
9696
9697 case REG_DEAD:
9698 /* If the register is used as an input in I3, it dies there.
9699 Similarly for I2, if it is non-zero and adjacent to I3.
9700
9701 If the register is not used as an input in either I3 or I2
9702 and it is not one of the registers we were supposed to eliminate,
9703 there are two possibilities. We might have a non-adjacent I2
9704 or we might have somehow eliminated an additional register
9705 from a computation. For example, we might have had A & B where
9706 we discover that B will always be zero. In this case we will
9707 eliminate the reference to A.
9708
9709 In both cases, we must search to see if we can find a previous
9710 use of A and put the death note there. */
9711
9712 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9713 place = i3;
9714 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9715 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9716 place = i2;
9717
9718 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9719 break;
9720
9721 /* If the register is used in both I2 and I3 and it dies in I3,
9722 we might have added another reference to it. If reg_n_refs
9723 was 2, bump it to 3. This has to be correct since the
9724 register must have been set somewhere. The reason this is
9725 done is because local-alloc.c treats 2 references as a
9726 special case. */
9727
9728 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9729 && reg_n_refs[REGNO (XEXP (note, 0))] == 2
9730 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9731 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9732
9733 if (place == 0)
9734 for (tem = prev_nonnote_insn (i3);
9735 tem && (GET_CODE (tem) == INSN
9736 || GET_CODE (tem) == CALL_INSN);
9737 tem = prev_nonnote_insn (tem))
9738 {
9739 /* If the register is being set at TEM, see if that is all
9740 TEM is doing. If so, delete TEM. Otherwise, make this
9741 into a REG_UNUSED note instead. */
9742 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9743 {
9744 rtx set = single_set (tem);
9745
9746 /* Verify that it was the set, and not a clobber that
9747 modified the register. */
9748
9749 if (set != 0 && ! side_effects_p (SET_SRC (set))
9750 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
9751 {
9752 /* Move the notes and links of TEM elsewhere.
9753 This might delete other dead insns recursively.
9754 First set the pattern to something that won't use
9755 any register. */
9756
9757 PATTERN (tem) = pc_rtx;
9758
9759 distribute_notes (REG_NOTES (tem), tem, tem,
9760 NULL_RTX, NULL_RTX, NULL_RTX);
9761 distribute_links (LOG_LINKS (tem));
9762
9763 PUT_CODE (tem, NOTE);
9764 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
9765 NOTE_SOURCE_FILE (tem) = 0;
9766 }
9767 else
9768 {
9769 PUT_REG_NOTE_KIND (note, REG_UNUSED);
9770
9771 /* If there isn't already a REG_UNUSED note, put one
9772 here. */
9773 if (! find_regno_note (tem, REG_UNUSED,
9774 REGNO (XEXP (note, 0))))
9775 place = tem;
9776 break;
9777 }
9778 }
9779 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
9780 {
9781 place = tem;
9782 break;
9783 }
9784 }

          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.

             Note that we cannot use just `dead_or_set_p' here since we can
             convert an assignment to a register into a bit-field assignment.
             Therefore, we must also omit the note if the register is the
             target of a bitfield assignment.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              int regno = REGNO (XEXP (note, 0));

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     reg_last_death.  [I no longer understand why this is
                     being done.]  */
                  if (reg_last_death[regno] != place)
                    reg_last_death[regno] = 0;
                  place = 0;
                }
              else
                reg_last_death[regno] = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must add a USE for that piece before
                 PLACE and put the appropriate REG_DEAD note on it.

                 An alternative would be to put a REG_UNUSED note for the
                 pieces on the insn that set the register, but that can't
                 be done if it is not in the same block.  It is simpler,
                 though less efficient, to add the USE insns.  */
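              /* For example, if a multi-word value occupies hard regs 2
                 and 3 but PLACE uses only reg 2, we emit (use (reg 3))
                 before PLACE and put a REG_DEAD note for reg 3 on that
                 new insn.  */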

              if (place && regno < FIRST_PSEUDO_REGISTER
                  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
                {
                  int endregno
                    = regno + HARD_REGNO_NREGS (regno,
                                                GET_MODE (XEXP (note, 0)));
                  int all_used = 1;
                  int i;

                  for (i = regno; i < endregno; i++)
                    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
                      {
                        rtx piece = gen_rtx (REG, word_mode, i);
                        rtx p;

                        /* See if we already placed a USE insn for this
                           register in front of PLACE.  */
                        for (p = place;
                             GET_CODE (PREV_INSN (p)) == INSN
                             && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
                             p = PREV_INSN (p))
                          if (rtx_equal_p (piece,
                                           XEXP (PATTERN (PREV_INSN (p)), 0)))
                            {
                              p = 0;
                              break;
                            }

                        if (p)
                          {
                            rtx use_insn
                              = emit_insn_before (gen_rtx (USE, VOIDmode,
                                                           piece),
                                                  p);
                            REG_NOTES (use_insn)
                              = gen_rtx (EXPR_LIST, REG_DEAD, piece,
                                         REG_NOTES (use_insn));
                          }
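
                        /* The USE insn gives the otherwise-unused piece an
                           explicit use, and its REG_DEAD note records that
                           the piece dies there, just before PLACE.  */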

                        all_used = 0;
                      }

                  /* Check for the case where the register dying partially
                     overlaps the register set by this insn.  */
                  if (all_used)
                    for (i = regno; i < endregno; i++)
                      if (dead_or_set_regno_p (place, i))
                        {
                          all_used = 0;
                          break;
                        }

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         still used and that are not already dead or set.  */

                      for (i = regno; i < endregno; i++)
                        {
                          rtx piece = gen_rtx (REG, word_mode, i);

                          if (reg_referenced_p (piece, PATTERN (place))
                              && ! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            REG_NOTES (place)
                              = gen_rtx (EXPR_LIST, REG_DEAD, piece,
                                         REG_NOTES (place));
                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          abort ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
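      /* If no place was found, the note is being discarded entirely, so
         keep reg_n_deaths consistent for the register it mentions.  */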
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && GET_CODE (XEXP (note, 0)) == REG)
        reg_n_deaths[REGNO (XEXP (note, 0))]--;

      if (place2)
        {
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && GET_CODE (XEXP (note, 0)) == REG)
            reg_n_deaths[REGNO (XEXP (note, 0))]++;

          REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
                                        XEXP (note, 0), REG_NOTES (place2));
        }
    }
}
\f
/* Similarly to the above, distribute the LOG_LINKS that used to be present
   on I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link
         to have been changed by combine.  The only way that could happen
         is if we were to replace I3, I2, and I1 by I3 and I2.  But in that
         case the destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == SIGN_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);
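
      /* REG is now the underlying destination, with any SUBREG,
         ZERO_EXTRACT, SIGN_EXTRACT, or STRICT_LOW_PART wrappers
         stripped off.  */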

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */
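      /* For example, if the link's target sets (reg 64) and the first
         following insn that mentions (reg 64) uses it as an input, the
         link moves onto that insn; if that insn instead only sets
         (reg 64), or the block ends first, the link is dropped.  */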

      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && GET_CODE (insn) != CODE_LABEL
            && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
            && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;
            }
        }
    }
}
\f
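/* Print the per-function combiner statistics.  The ";;" prefixes match
   the comment convention used in the RTL dump files, where this output
   normally appears.  */
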
void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}