1/* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 1, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21/* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
24
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
30
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
34
35 LOG_LINKS does not record links for uses of CC0. It does not
36 need to, because the insn that sets CC0 is always immediately
37 before the insn that tests it. So we always regard a branch
38 insn as having a logical link to the preceding insn.
39
40 We check (with use_crosses_set_p) to avoid combining in such a way
41 as to move a computation to a place where its value would be different.
42
43 Combination is done by mathematically substituting the previous
44 insn(s)' values for the regs they set into the expressions in
45 the later insns that refer to these regs. If the result is a valid insn
46 for our target machine, according to the machine description,
47 we install it, delete the earlier insns, and update the data flow
48 information (LOG_LINKS and REG_NOTES) for what we did.
49
50 To simplify substitution, we combine only when the earlier insn(s)
51 consist of only a single assignment. To simplify updating afterward,
52 we never combine when a subroutine call appears in the middle.
53
54 Since we do not represent assignments to CC0 explicitly except when that
55 is all an insn does, there is no LOG_LINKS entry in an insn that uses
56 the condition code for the insn that set the condition code.
57 Fortunately, these two insns must be consecutive.
58 Therefore, every JUMP_INSN is taken to have an implicit logical link
59 to the preceding insn. This is not quite right, since non-jumps can
60 also use the condition code; but in practice such insns would not
61 combine anyway. */
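/* As an illustrative sketch (hypothetical insns and pseudo-register
   numbers, not taken from any machine description): given

	I2: (set (reg 58) (plus (reg 57) (const_int 4)))
	I3: (set (reg 59) (mult (reg 58) (reg 60)))

   where (reg 58) dies in I3, substituting I2's value into I3 yields

	I3: (set (reg 59) (mult (plus (reg 57) (const_int 4)) (reg 60)))

   If the machine description recognizes the new pattern, I2 is turned
   into a NOTE and I3 is rewritten; otherwise nothing is changed.  */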
62
63#include <stdio.h>
64
65#include "config.h"
66#include "rtl.h"
67#include "flags.h"
68#include "regs.h"
69#include "basic-block.h"
70#include "insn-config.h"
71#include "recog.h"
72
73#define max(A,B) ((A) > (B) ? (A) : (B))
74#define min(A,B) ((A) < (B) ? (A) : (B))
75
76/* It is not safe to use ordinary gen_lowpart in combine.
77 Use gen_lowpart_for_combine instead. See comments there. */
78#define gen_lowpart dont_use_gen_lowpart_you_dummy
79
80/* Number of attempts to combine instructions in this function. */
81
82static int combine_attempts;
83static int distrib_attempts;
84
85/* Number of attempts that got as far as substitution in this function. */
86
87static int combine_merges;
88static int distrib_merges_1, distrib_merges_2;
89
90/* Number of instructions combined with added SETs in this function. */
91
92static int combine_extras;
93
94/* Number of instructions combined in this function. */
95
96static int combine_successes;
97static int distrib_successes;
98
99/* Totals over entire compilation. */
100
101static int total_attempts, total_merges, total_extras, total_successes;
102static int total_distrib_attempts, total_distrib_merges_1, total_distrib_merges_2, total_distrib_successes;
103
104
105/* Vector mapping INSN_UIDs to cuids.
106 The cuids are like uids but always increase monotonically.
107 Combine always uses cuids so that it can compare them.
108 But actually renumbering the uids, which we used to do,
109 proves to be a bad idea because it makes it hard to compare
110 the dumps produced by earlier passes with those from later passes. */
111
112static int *uid_cuid;
113
114/* Get the cuid of an insn. */
115
116#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
117
118
119/* Record last point of death of (hard or pseudo) register n. */
120
121static rtx *reg_last_death;
122
123/* Record last point of modification of (hard or pseudo) register n. */
124
125static rtx *reg_last_set;
126
127/* Record the cuid of the last insn that invalidated memory
128 (anything that writes memory, and subroutine calls). */
129
130static int mem_last_set;
131
132/* Record the cuid of the last CALL_INSN
133 so we can tell whether a potential combination crosses any calls. */
134
135static int last_call_cuid;
136
137/* When `subst' is called, this is the insn that is being modified
138 (by combining in a previous insn). The PATTERN of this insn
139 is still the old pattern partially modified and it should not be
140 looked at, but this may be used to examine the successors of the insn
141 to judge whether a simplification is valid. */
142
143static rtx subst_insn;
144
145/* Record one modification to rtl structure
146 to be undone by storing old_contents into *where.
147 is_int is 1 if the contents are an int. */
148
149struct undo
150{
151 rtx *where;
152 rtx old_contents;
153 int is_int;
154};
155
156struct undo_int
157{
158 int *where;
159 int old_contents;
160 int is_int;
161};
162
163/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
164 num_undo says how many are currently recorded.
165 storage is nonzero if we must undo the allocation of new storage.
166 The value of storage is what to pass to obfree. */
167
168#define MAX_UNDO 10
169
170struct undobuf
171{
172 int num_undo;
173 char *storage;
174 struct undo undo[MAX_UNDO];
175};
176
177static struct undobuf undobuf;
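/* A rough sketch of the undo protocol (the rtx variable `newval' is a
   hypothetical stand-in for whatever replacement is being tried):

	SUBST (XEXP (x, 0), newval);	record the old contents of
					XEXP (x, 0) in undobuf, then
					store the new value
	...
	undo_all ();			if the combination fails, restore
					every recorded location and free
					undobuf.storage if it was set

   The SUBST macro is defined inside `subst' below. Note that num_undo
   keeps counting past MAX_UNDO, so callers can tell when a result is
   invalid because not all changes could be recorded.  */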
178
179/* Number of times the pseudo being substituted for
180 was found and replaced. */
181
182static int n_occurrences;
183
184static void move_deaths ();
185static void move_deaths_2 ();
186void remove_death ();
187static void record_dead_and_set_regs ();
188int regno_dead_p ();
189static int use_crosses_set_p ();
190static int try_combine ();
191static rtx try_distrib ();
192static rtx subst ();
193static void undo_all ();
194static void copy_substitutions ();
195static void add_links ();
196static void remove_links ();
197static void add_incs ();
198static int adjacent_insns_p ();
199static int check_asm_operands ();
200static rtx simplify_and_const_int ();
201static rtx gen_lowpart_for_combine ();
202static void simplify_set_cc0_and ();
203\f
204/* Main entry point for combiner. F is the first insn of the function.
205 NREGS is the first unused pseudo-reg number. */
206
207void
208combine_instructions (f, nregs)
209 rtx f;
210 int nregs;
211{
212 register rtx insn;
213 register int i;
214 register rtx links, nextlinks;
215 rtx prev;
216
217 combine_attempts = 0;
218 combine_merges = 0;
219 combine_extras = 0;
220 combine_successes = 0;
221 distrib_attempts = 0;
222 distrib_merges_1 = 0;
223 distrib_merges_2 = 0;
224 distrib_successes = 0;
225
226 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
227 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
228 bzero (reg_last_death, nregs * sizeof (rtx));
229 bzero (reg_last_set, nregs * sizeof (rtx));
230
231 init_recog ();
232
233 /* Compute maximum uid value so uid_cuid can be allocated. */
234
235 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
236 if (INSN_UID (insn) > i)
237 i = INSN_UID (insn);
238
239 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
240
241 /* Compute the mapping from uids to cuids.
242 Cuids are numbers assigned to insns, like uids,
243 except that cuids increase monotonically through the code. */
244
245 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
246 INSN_CUID (insn) = ++i;
247
248 /* Now scan all the insns in forward order. */
249
250 last_call_cuid = 0;
251 mem_last_set = 0;
252 prev = 0;
253
254 for (insn = f; insn; insn = NEXT_INSN (insn))
255 {
256 if (GET_CODE (insn) == INSN
257 || GET_CODE (insn) == CALL_INSN
258 || GET_CODE (insn) == JUMP_INSN)
259 {
260 retry:
261 /* Try this insn with each insn it links back to. */
262
263 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
264 if (try_combine (insn, XEXP (links, 0), 0))
265 goto retry;
266
267 /* Try each sequence of three linked insns ending with this one. */
268
269 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
270 if (GET_CODE (XEXP (links, 0)) != NOTE)
271 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
272 nextlinks = XEXP (nextlinks, 1))
273 if (try_combine (insn, XEXP (links, 0), XEXP (nextlinks, 0)))
274 goto retry;
275
276 /* Try to combine a jump insn that uses CC0
277 with a preceding insn that sets CC0, and maybe with its
278 logical predecessor as well.
279 This is how we make decrement-and-branch insns.
280 We need this special code because data flow connections
281 via CC0 do not get entered in LOG_LINKS. */
282
283 if (GET_CODE (insn) == JUMP_INSN
284 && prev != 0
285 && GET_CODE (prev) == INSN
286 && GET_CODE (PATTERN (prev)) == SET
287 && GET_CODE (SET_DEST (PATTERN (prev))) == CC0)
288 {
289 if (try_combine (insn, prev, 0))
290 goto retry;
291
292 if (GET_CODE (prev) != NOTE)
293 for (nextlinks = LOG_LINKS (prev); nextlinks;
294 nextlinks = XEXP (nextlinks, 1))
295 if (try_combine (insn, prev, XEXP (nextlinks, 0)))
296 goto retry;
297 }
298
299 /* Try to apply the distributive law to this insn
300 and two insns that compute the operands of this one. */
301 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
302 if (GET_CODE (XEXP (links, 0)) != NOTE)
303 for (nextlinks = XEXP (links, 1); nextlinks; nextlinks = XEXP (nextlinks, 1))
304 if (GET_CODE (XEXP (nextlinks, 0)) != NOTE)
305 {
306 rtx try_from = 0;
307
308 if (GET_CODE (PATTERN (XEXP (links, 0))) == SET
309 && find_reg_note (insn, REG_DEAD, SET_DEST (PATTERN (XEXP (links, 0))))
310 && GET_CODE (PATTERN (XEXP (nextlinks, 0))) == SET
311 && find_reg_note (insn, REG_DEAD, SET_DEST (PATTERN (XEXP (nextlinks, 0)))))
312 try_from = try_distrib (insn, XEXP (links, 0), XEXP (nextlinks, 0));
313 if (try_from != 0)
314 {
315 insn = try_from;
316 goto retry;
317 }
318 }
319#if 0
320/* Turned off because on 68020 it takes four insns to make
321 something like (a[b / 32] & (1 << (31 - (b % 32)))) != 0
322 that could actually be optimized, and that's an unlikely piece of code. */
323 /* If an insn gets or sets a bit field, try combining it
324 with two different insns whose results it uses. */
325 if (GET_CODE (insn) == INSN
326 && GET_CODE (PATTERN (insn)) == SET
327 && (GET_CODE (SET_DEST (PATTERN (insn))) == ZERO_EXTRACT
328 || GET_CODE (SET_DEST (PATTERN (insn))) == SIGN_EXTRACT
329 || GET_CODE (SET_SRC (PATTERN (insn))) == ZERO_EXTRACT
330 || GET_CODE (SET_SRC (PATTERN (insn))) == SIGN_EXTRACT))
331 {
332 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
333 if (GET_CODE (XEXP (links, 0)) != NOTE)
334 for (nextlinks = XEXP (links, 1); nextlinks;
335 nextlinks = XEXP (nextlinks, 1))
336 if (try_combine (insn, XEXP (links, 0), XEXP (nextlinks, 0)))
337 goto retry;
338 }
339#endif
340 if (GET_CODE (insn) != NOTE)
341 record_dead_and_set_regs (insn);
342 prev = insn;
343 }
344 else if (GET_CODE (insn) != NOTE)
345 prev = 0;
346 }
347 total_attempts += combine_attempts;
348 total_merges += combine_merges;
349 total_extras += combine_extras;
350 total_successes += combine_successes;
351}
352\f
353/* Try to combine the insns I1 and I2 into I3.
354 Here I1 appears earlier than I2, which is earlier than I3.
355 I1 can be zero; then we combine just I2 into I3.
356
357 Return 1 if successful; if that happens, I1 and I2 are pseudo-deleted
358 by turning them into NOTEs, and I3 is modified.
359 Return 0 if the combination does not work. Then nothing is changed. */
360
361static int
362try_combine (i3, i2, i1)
363 register rtx i3, i2, i1;
364{
365 register rtx newpat;
366 int added_sets_1 = 0;
367 int added_sets_2 = 0;
368 int total_sets;
369 int i2_is_used;
370 register rtx link;
371 int insn_code_number;
372 rtx i2dest, i2src;
373 rtx i1dest, i1src;
374 int maxreg;
375 rtx temp;
376 int i;
377
378 combine_attempts++;
379
380 /* Don't combine with something already used up by combination. */
381
382 if (GET_CODE (i2) == NOTE
383 || (i1 && GET_CODE (i1) == NOTE))
384 return 0;
385
386 /* Don't combine across a CALL_INSN, because that would possibly
387 change whether the life span of some REGs crosses calls or not,
388 and it is a pain to update that information. */
389
390 if (INSN_CUID (i2) < last_call_cuid
391 || (i1 && INSN_CUID (i1) < last_call_cuid))
392 return 0;
393
394 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
395 That REG must be either set or dead by the final instruction
396 (so that we can safely forget about setting it).
397 Also test use_crosses_set_p to make sure that the value
398 that is to be substituted for the register
399 does not use any registers whose values alter in between.
400 Do not try combining with moves from one register to another
401 since it is better to let them be tied by register allocation.
402 (There is a switch to permit such combination; except the insns
403 that copy a function value into another register are never combined
404 because moving that too far away from the function call could cause
405 something else to be stored in that register in the interim.)
406
407 A set of a SUBREG is treated as if it were a set of the full
408 register it contains. Thus, (SET (SUBREG:X (REG:Y...)) (something:X...))
409 is handled by substituting (SUBREG:Y (something:X...)) for (REG:Y...). */
410
411 if (GET_CODE (PATTERN (i2)) != SET)
412 return 0;
413 i2dest = SET_DEST (PATTERN (i2));
414 i2src = SET_SRC (PATTERN (i2));
415 if (GET_CODE (i2dest) == SUBREG)
416 {
417 i2dest = SUBREG_REG (i2dest);
418 i2src = gen_rtx (SUBREG, GET_MODE (i2dest), i2src, 0);
419 }
420 /* Don't eliminate a store in the stack pointer. */
421 if (i2dest == stack_pointer_rtx)
422 return 0;
423 /* Don't install a subreg involving two modes not tieable.
424 It can worsen register allocation, and can even make invalid reload insns,
425 since the reg inside may need to be copied from in the outside mode,
426 and that may be invalid if it is an fp reg copied in integer mode. */
427 if (GET_CODE (i2src) == SUBREG
428 && ! MODES_TIEABLE_P (GET_MODE (i2src), GET_MODE (SUBREG_REG (i2src))))
429 return 0;
430 if (GET_CODE (i2dest) != CC0
431 && (GET_CODE (i2dest) != REG
432 || (GET_CODE (i2src) == REG
433 /* Do allow the combination of y = x; x = y; (with x dead)
434 because the result will turn into nothing. */
435 && !(GET_CODE (PATTERN (i3)) == SET
436 && i2src == SET_DEST (PATTERN (i3)))
437 && (!flag_combine_regs
438 /* Don't substitute a function value reg for any other. */
439 || FUNCTION_VALUE_REGNO_P (REGNO (i2src))))
440 || GET_CODE (i2src) == CALL
441 /* Don't substitute into an incremented register. */
442 || find_reg_note (i3, REG_INC, i2dest)
443 || use_crosses_set_p (i2src, INSN_CUID (i2))))
444 return 0;
445 if (GET_CODE (i2src) == ASM_OPERANDS && MEM_VOLATILE_P (i2src))
446 return 0;
447 /* Don't substitute for a register intended as a clobberable operand. */
448 if (GET_CODE (PATTERN (i3)) == PARALLEL)
449 for (i = 0; i < XVECLEN (PATTERN (i3), 0); i++)
450 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
451 && XEXP (XVECEXP (PATTERN (i3), 0, i), 0) == i2dest)
452 return 0;
453
454 if (i1 != 0)
455 {
456 if (GET_CODE (PATTERN (i1)) != SET)
457 return 0;
458 i1dest = SET_DEST (PATTERN (i1));
459 i1src = SET_SRC (PATTERN (i1));
460 if (GET_CODE (i1dest) == SUBREG)
461 {
462 i1dest = SUBREG_REG (i1dest);
463 i1src = gen_rtx (SUBREG, GET_MODE (i1dest), i1src, 0);
464 }
465 if (i1dest == stack_pointer_rtx)
466 return 0;
467 if (GET_CODE (i1src) == SUBREG
468 && ! MODES_TIEABLE_P (GET_MODE (i1src),
469 GET_MODE (SUBREG_REG (i1src))))
470 return 0;
471 if (GET_CODE (i1dest) != CC0
472 && (GET_CODE (i1dest) != REG
473 || (GET_CODE (i1src) == REG
474 && (!flag_combine_regs
475 || FUNCTION_VALUE_REGNO_P (REGNO (i1src))))
476 || GET_CODE (i1src) == CALL
477 || find_reg_note (i3, REG_INC, i1dest)
478 || find_reg_note (i2, REG_INC, i1dest)
479 || use_crosses_set_p (i1src, INSN_CUID (i1))))
480 return 0;
481 if (GET_CODE (i1src) == ASM_OPERANDS && MEM_VOLATILE_P (i1src))
482 return 0;
483 /* Don't substitute for a register intended as a clobberable operand. */
484 if (GET_CODE (PATTERN (i3)) == PARALLEL)
485 for (i = 0; i < XVECLEN (PATTERN (i3), 0); i++)
486 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
487 && XEXP (XVECEXP (PATTERN (i3), 0, i), 0) == i1dest)
488 return 0;
489 }
490
491 /* If it is better that two different modes keep two different pseudos,
492 avoid combining them. */
493 if (GET_CODE (PATTERN (i3)) == SET)
494 {
495 rtx i3dest = SET_DEST (PATTERN (i3));
496 while (GET_CODE (i3dest) == SUBREG
497 || GET_CODE (i3dest) == STRICT_LOW_PART
498 || GET_CODE (i3dest) == SIGN_EXTRACT
499 || GET_CODE (i3dest) == ZERO_EXTRACT)
500 i3dest = SUBREG_REG (i3dest);
501
502 if (SET_SRC (PATTERN (i3)) == i2dest
503 && GET_CODE (i3dest) == REG
504 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (i3dest)))
505 return 0;
506 }
507
508 /* If I2 contains anything volatile, reject, unless nothing
509 volatile comes between it and I3. */
510 if (volatile_refs_p (PATTERN (i2)))
511 {
512 rtx insn;
513 for (insn = NEXT_INSN (i2); insn != i3; insn = NEXT_INSN (insn))
514 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
515 || GET_CODE (insn) == JUMP_INSN)
516 if (volatile_refs_p (PATTERN (insn)))
517 return 0;
518 }
519 /* Likewise for I1; nothing volatile can come between it and I3,
520 except optionally I2. */
521 if (i1 && volatile_refs_p (PATTERN (i1)))
522 {
523 rtx insn;
524 rtx end = (volatile_refs_p (PATTERN (i2)) ? i2 : i3);
525 for (insn = NEXT_INSN (i1); insn != end; insn = NEXT_INSN (insn))
526 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
527 || GET_CODE (insn) == JUMP_INSN)
528 if (volatile_refs_p (PATTERN (insn)))
529 return 0;
530 }
531
532 /* If I1 or I2 contains an autoincrement or autodecrement,
533 make sure that register is not used between there and I3,
534 and not already used in I3 either.
535 Also insist that I3 not be a jump; if it were one
536 and the incremented register were spilled, we would lose. */
537 for (link = REG_NOTES (i2); link; link = XEXP (link, 1))
538 if (REG_NOTE_KIND (link) == REG_INC
539 && (GET_CODE (i3) == JUMP_INSN
540 || reg_used_between_p (XEXP (link, 0), i2, i3)
541 || reg_mentioned_p (XEXP (link, 0), PATTERN (i3))))
542 return 0;
543
544 if (i1)
545 for (link = REG_NOTES (i1); link; link = XEXP (link, 1))
546 if (REG_NOTE_KIND (link) == REG_INC
547 && (GET_CODE (i3) == JUMP_INSN
548 || reg_used_between_p (XEXP (link, 0), i1, i3)
549 || reg_mentioned_p (XEXP (link, 0), PATTERN (i3))))
550 return 0;
551
552 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd,
553 EXCEPT in one case: I3 has a post-inc in an output operand. */
554 if (!(GET_CODE (PATTERN (i3)) == SET
555 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
556 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
557 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
558 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
559 /* It's not the exception. */
560 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
561 if (REG_NOTE_KIND (link) == REG_INC
562 && (reg_mentioned_p (XEXP (link, 0), PATTERN (i2))
563 || (i1 != 0
564 && reg_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
565 return 0;
566
567 /* Don't combine an insn I1 or I2 that follows a CC0-setting insn.
568 An insn that uses CC0 must not be separated from the one that sets it.
569 It would be more logical to test whether CC0 occurs inside I1 or I2,
570 but that would be much slower, and this ought to be equivalent. */
571 temp = PREV_INSN (i2);
572 while (temp && GET_CODE (temp) == NOTE)
573 temp = PREV_INSN (temp);
574 if (temp && GET_CODE (temp) == INSN && sets_cc0_p (PATTERN (temp)))
575 return 0;
576 if (i1)
577 {
578 temp = PREV_INSN (i1);
579 while (temp && GET_CODE (temp) == NOTE)
580 temp = PREV_INSN (temp);
581 if (temp && GET_CODE (temp) == INSN && sets_cc0_p (PATTERN (temp)))
582 return 0;
583 }
584
585 /* See if the SETs in i1 or i2 need to be kept around in the merged
586 instruction: whenever the value set there is still needed past i3. */
587 added_sets_2 = (GET_CODE (i2dest) != CC0
588 && ! dead_or_set_p (i3, i2dest));
589 if (i1)
590 added_sets_1 = ! (dead_or_set_p (i3, i1dest)
591 || dead_or_set_p (i2, i1dest));
592
593 combine_merges++;
594
595 undobuf.num_undo = 0;
596 undobuf.storage = 0;
597
598 /* Substitute in the latest insn for the regs set by the earlier ones. */
599
600 maxreg = max_reg_num ();
601
602 subst_insn = i3;
603 n_occurrences = 0; /* `subst' counts here */
604
605 newpat = subst (PATTERN (i3), i2dest, i2src);
606 /* Record how many times I2's value was substituted into I3's body. */
607 i2_is_used = n_occurrences;
608
609 if (i1)
610 {
611 n_occurrences = 0;
612 newpat = subst (newpat, i1dest, i1src);
613 }
614
615 if (GET_CODE (PATTERN (i3)) == SET
616 && SET_DEST (PATTERN (i3)) == cc0_rtx
617 && (GET_CODE (SET_SRC (PATTERN (i3))) == AND
618 || GET_CODE (SET_SRC (PATTERN (i3))) == LSHIFTRT)
619 && next_insn_tests_no_inequality (i3))
620 simplify_set_cc0_and (i3);
621
622 if (max_reg_num () != maxreg)
623 abort ();
624
625 /* If the actions of the earlier insns must be kept
626 in addition to substituting them into the latest one,
627 we must make a new PARALLEL for the latest insn
628 to hold the additional SETs. */
629
630 if (added_sets_1 || added_sets_2)
631 {
632 combine_extras++;
633
634 /* Arrange to free later what we allocate now
635 if we don't accept this combination. */
636 if (!undobuf.storage)
637 undobuf.storage = (char *) oballoc (0);
638
639 if (GET_CODE (newpat) == PARALLEL)
640 {
641 rtvec old = XVEC (newpat, 0);
642 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
643 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
644 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
645 sizeof (old->elem[0]) * old->num_elem);
646 }
647 else
648 {
649 rtx old = newpat;
650 total_sets = 1 + added_sets_1 + added_sets_2;
651 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
652 XVECEXP (newpat, 0, 0) = old;
653 }
654 if (added_sets_1)
655 {
656 XVECEXP (newpat, 0, --total_sets) = PATTERN (i1);
657 }
658 if (added_sets_2)
659 {
660 /* If there is no I1, use I2's body as is. */
661 if (i1 == 0
662 /* If I2 was stuck into I3, then anything within it has
663 already had I1 substituted into it when that was done to I3. */
664 || i2_is_used)
665 {
666 XVECEXP (newpat, 0, --total_sets) = PATTERN (i2);
667 }
668 else
669 XVECEXP (newpat, 0, --total_sets)
670 = subst (PATTERN (i2), i1dest, i1src);
671 }
672 }
673
674 /* Fail if an autoincrement side-effect has been duplicated. */
675 if ((i2_is_used > 1 && find_reg_note (i2, REG_INC, 0) != 0)
676 || (i1 != 0 && n_occurrences > 1 && find_reg_note (i1, REG_INC, 0) != 0))
677 {
678 undo_all ();
679 return 0;
680 }
681
682 /* Is the result of combination a valid instruction? */
683 insn_code_number = recog (newpat, i3);
684
685 if (insn_code_number >= 0
686 /* Is the result a reasonable ASM_OPERANDS? */
687 || (check_asm_operands (newpat) && ! added_sets_1 && ! added_sets_2))
688 {
689 /* Yes. Install it. */
690 register int regno;
691 INSN_CODE (i3) = insn_code_number;
692 PATTERN (i3) = newpat;
693 /* If anything was substituted more than once,
694 copy it to avoid invalid shared rtl structure. */
695 copy_substitutions ();
696 /* The data flowing into I2 now flows into I3.
697 But we cannot always move all of I2's LOG_LINKS into I3,
698 since they must go to a setting of a REG from the
699 first use following. If I2 was the first use following a set,
700 I3 is now a use, but it is not the first use
701 if some instruction between I2 and I3 is also a use.
702 Here, for simplicity, we move all the links only if
703 there are no real insns between I2 and I3.
704 Otherwise, we move only links that correspond to regs
705 that used to die in I2. They are always safe to move. */
706 add_links (i3, i2, adjacent_insns_p (i2, i3));
707 /* Most REGs that previously died in I2 now die in I3. */
708 move_deaths (i2src, INSN_CUID (i2), i3);
709 if (GET_CODE (i2dest) == REG)
710 {
711 /* If the reg formerly set in I2 died only once and that was in I3,
712 zero its use count so it won't make `reload' do any work. */
713 regno = REGNO (i2dest);
714 if (! added_sets_2)
715 {
716 reg_n_sets[regno]--;
717 /* Used to check && regno_dead_p (regno, i3) also here. */
718 if (reg_n_sets[regno] == 0
719 && ! (basic_block_live_at_start[0][regno / HOST_BITS_PER_INT]
720 & (1 << (regno % HOST_BITS_PER_INT))))
721 reg_n_refs[regno] = 0;
722 }
723 /* If a ref to REGNO was substituted into I3 from I2,
724 then it still dies there if it previously did.
725 Otherwise either REGNO never did die in I3 so remove_death is safe
726 or this entire life of REGNO is gone so remove its death. */
727 if (!added_sets_2
728 && ! reg_mentioned_p (i2dest, PATTERN (i3)))
729 remove_death (regno, i3);
730 }
731 /* Any registers previously autoincremented in I2
732 are now incremented in I3. */
733 add_incs (i3, REG_NOTES (i2));
734 if (i1)
735 {
736 /* Likewise, merge the info from I1 and get rid of it. */
737 add_links (i3, i1,
738 adjacent_insns_p (i1, i2) && adjacent_insns_p (i2, i3));
739 move_deaths (i1src, INSN_CUID (i1), i3);
740 if (GET_CODE (i1dest) == REG)
741 {
742 regno = REGNO (i1dest);
743 if (! added_sets_1)
744 {
745 reg_n_sets[regno]--;
746 /* Used to also check && regno_dead_p (regno, i3) here. */
747
748 if (reg_n_sets[regno] == 0
749 && ! (basic_block_live_at_start[0][regno / HOST_BITS_PER_INT]
750 & (1 << (regno % HOST_BITS_PER_INT))))
751
752 reg_n_refs[regno] = 0;
753 }
754 /* If a ref to REGNO was substituted into I3 from I1,
755 then it still dies there if it previously did.
756 Else either REGNO never did die in I3 so remove_death is safe
757 or this entire life of REGNO is gone so remove its death. */
758 if (! added_sets_1
759 && ! reg_mentioned_p (i1dest, PATTERN (i3)))
760 remove_death (regno, i3);
761 }
762 add_incs (i3, REG_NOTES (i1));
763 LOG_LINKS (i1) = 0;
764 PUT_CODE (i1, NOTE);
765 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
766 NOTE_SOURCE_FILE (i1) = 0;
767 }
768 /* Get rid of I2. */
769 LOG_LINKS (i2) = 0;
770 PUT_CODE (i2, NOTE);
771 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
772 NOTE_SOURCE_FILE (i2) = 0;
773
774 combine_successes++;
775 return 1;
776 }
777
778 /* Failure: change I3 back the way it was. */
779 undo_all ();
780
781 return 0;
782}
783\f
784/* Undo all the modifications recorded in undobuf. */
785
786static void
787undo_all ()
788{
789 register int i;
790 if (undobuf.num_undo > MAX_UNDO)
791 undobuf.num_undo = MAX_UNDO;
792 for (i = undobuf.num_undo - 1; i >= 0; i--)
793 *undobuf.undo[i].where = undobuf.undo[i].old_contents;
794 if (undobuf.storage)
795 obfree (undobuf.storage);
796 undobuf.num_undo = 0;
797 undobuf.storage = 0;
798}
799
800/* If this insn had more than one substitution,
801 copy all but one, so that no invalid shared substructure is introduced. */
802
803static void
804copy_substitutions ()
805{
806 register int i;
807 if (undobuf.num_undo > 1)
808 {
809 for (i = undobuf.num_undo - 1; i >= 1; i--)
810 if (! undobuf.undo[i].is_int)
811 *undobuf.undo[i].where = copy_rtx (*undobuf.undo[i].where);
812 }
813}
814
815/* Throughout X, replace FROM with TO, and return the result.
816 The result is TO if X is FROM;
817 otherwise the result is X, but its contents may have been modified.
818 If they were modified, a record was made in undobuf so that
819 undo_all will (among other things) return X to its original state.
820
821 If more changes are necessary than undobuf can record, the excess
822 changes are not made, so the result is invalid.
823 The changes already made can still be undone.
824 undobuf.num_undo is incremented even for such changes, so by testing it
825 the caller can tell whether the result is valid.
826
827 `n_occurrences' is incremented each time FROM is replaced. */
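/* For example, `try_combine' calls it as

	n_occurrences = 0;
	newpat = subst (PATTERN (i3), i2dest, i2src);

   and then asks `recog (newpat, i3)' whether the result is a valid
   insn; if not, the recorded changes are reverted with `undo_all'.  */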
828
829static rtx
830subst (x, from, to)
831 register rtx x, from, to;
832{
833 register char *fmt;
834 register int len, i;
835 register enum rtx_code code;
836 char was_replaced[2];
837
838#define SUBST(INTO, NEWVAL) \
839 do { if (undobuf.num_undo < MAX_UNDO) \
840 { \
841 undobuf.undo[undobuf.num_undo].where = &INTO; \
842 undobuf.undo[undobuf.num_undo].old_contents = INTO; \
843 undobuf.undo[undobuf.num_undo].is_int = 0; \
844 INTO = NEWVAL; \
845 } \
846 undobuf.num_undo++; } while (0)
847
848#define SUBST_INT(INTO, NEWVAL) \
849 do { if (undobuf.num_undo < MAX_UNDO) \
850 { \
851 struct undo_int *u = (struct undo_int *)&undobuf.undo[undobuf.num_undo];\
852 u->where = &INTO; \
853 u->old_contents = INTO; \
854 u->is_int = 1; \
855 INTO = NEWVAL; \
856 } \
857 undobuf.num_undo++; } while (0)
858
859/* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
860 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
861 If it is 0, that cannot be done. We can now do this for any MEM
862 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
863 If not for that, MEM's would very rarely be safe. */
864
865/* Reject MODEs bigger than a word, because we might not be able
866 to reference a two-register group starting with an arbitrary register
867 (and currently gen_lowpart might crash for a SUBREG). */
868
869#define FAKE_EXTEND_SAFE_P(MODE, FROM) \
870 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD \
871 && (GET_CODE (FROM) == REG || GET_CODE (FROM) == SUBREG \
872 || GET_CODE (FROM) == MEM))
873
874 if (x == from)
875 return to;
876
877 /* It is possible to have a subexpression appear twice in the insn.
878 Suppose that FROM is a register that appears within TO.
879 Then, after that subexpression has been scanned once by `subst',
880 the second time it is scanned, TO may be found. If we were
881 to scan TO here, we would find FROM within it and create a
882 self-referent rtl structure which is completely wrong. */
883 if (x == to)
884 return to;
885
886 code = GET_CODE (x);
887
888 /* A little bit of algebraic simplification here. */
889 switch (code)
890 {
891 /* This case has no effect except to speed things up. */
892 case REG:
893 case CONST_INT:
894 case CONST:
895 case SYMBOL_REF:
896 case LABEL_REF:
897 case PC:
898 case CC0:
899 return x;
900 }
901
902 was_replaced[0] = 0;
903 was_replaced[1] = 0;
904
905 len = GET_RTX_LENGTH (code);
906 fmt = GET_RTX_FORMAT (code);
907
908 /* Don't replace FROM where it is being stored into rather than used. */
909 if (code == SET && SET_DEST (x) == from)
910 fmt = "ie";
911 if (code == SET && GET_CODE (SET_DEST (x)) == SUBREG
912 && SUBREG_REG (SET_DEST (x)) == from)
913 fmt = "ie";
914
915 for (i = 0; i < len; i++)
916 {
917 if (fmt[i] == 'E')
918 {
919 register int j;
920 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
921 {
922 register rtx new;
923 if (XVECEXP (x, i, j) == from)
924 new = to, n_occurrences++;
925 else
926 new = subst (XVECEXP (x, i, j), from, to);
927 if (new != XVECEXP (x, i, j))
928 SUBST (XVECEXP (x, i, j), new);
929 }
930 }
931 else if (fmt[i] == 'e')
932 {
933 register rtx new;
934
935 if (XEXP (x, i) == from)
936 {
937 new = to;
938 n_occurrences++;
939 if (i < 2)
940 was_replaced[i] = 1;
941 }
942 else
943 new = subst (XEXP (x, i), from, to);
944
945 if (new != XEXP (x, i))
946 SUBST (XEXP (x, i), new);
947 }
948 }
949
950 /* A little bit of algebraic simplification here. */
951 switch (code)
952 {
953 case SUBREG:
954 /* Changing mode twice with SUBREG => just change it once,
955 or not at all if changing back to starting mode. */
956 if (SUBREG_REG (x) == to
957 && GET_CODE (to) == SUBREG)
958 {
959 if (GET_MODE (x) == GET_MODE (SUBREG_REG (to)))
960 if (SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
961 return SUBREG_REG (to);
962 SUBST (SUBREG_REG (x), SUBREG_REG (to));
963 if (SUBREG_WORD (to) != 0)
964 SUBST_INT (SUBREG_WORD (x), SUBREG_WORD (x) + SUBREG_WORD (to));
965 }
966 if (SUBREG_REG (x) == to
967 && (GET_CODE (to) == SIGN_EXTEND || GET_CODE (to) == ZERO_EXTEND)
968 && subreg_lowpart_p (x))
969 {
970 /* (subreg (sign_extend X)) is X, if it has same mode as X. */
971 if (GET_MODE (x) == GET_MODE (XEXP (to, 0)))
972 return XEXP (to, 0);
973 /* (subreg (sign_extend X)), if it has a mode wider than X,
974 can be done with (sign_extend X). */
975 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (XEXP (to, 0))))
976 {
977 if (!undobuf.storage)
978 undobuf.storage = (char *) oballoc (0);
979 return gen_rtx (GET_CODE (to), GET_MODE (x), XEXP (to, 0));
980 }
981 /* Extending and then truncating to something narrower than we
982 started with: no need to extend. */
983 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (GET_MODE (XEXP (to, 0))))
984 {
985 SUBST (XEXP (x, 0), XEXP (to, 0));
986 }
987 }
988 /* (subreg:A (mem:B X) N) becomes a modified MEM.
989 If we can't do that safely, then it becomes something nonsensical
990 so that this combination won't take place.
991 This avoids producing any (subreg (mem))s except in the special
992 paradoxical case where gen_lowpart_for_combine makes them. */
993 if (SUBREG_REG (x) == to
994 && GET_CODE (to) == MEM)
995 {
996 int endian_offset = 0;
997 /* Don't combine this if mode A is wider than B. */
998 if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (to)))
999 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
1000 /* Don't change the mode of the MEM
1001 if that would change the meaning of the address. */
1002 if (mode_dependent_address_p (XEXP (to, 0)))
1003 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
1004#ifdef BYTES_BIG_ENDIAN
1005 if (GET_MODE_SIZE (GET_MODE (x)) < UNITS_PER_WORD)
1006 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (x));
1007 if (GET_MODE_SIZE (GET_MODE (to)) < UNITS_PER_WORD)
1008 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (to));
1009#endif
1010 if (!undobuf.storage)
1011 undobuf.storage = (char *) oballoc (0);
1012 /* Note if the plus_constant doesn't make a valid address
1013 then this combination won't be accepted. */
1014 return gen_rtx (MEM, GET_MODE (x),
1015 plus_constant (XEXP (to, 0),
1016 (SUBREG_WORD (x) * UNITS_PER_WORD
1017 + endian_offset)));
1018 }
1019 break;
1020
1021 case NOT:
1022 /* (not (minus X 1)) can become (neg X). */
1023 if (was_replaced[0]
1024 && ((GET_CODE (to) == PLUS && INTVAL (XEXP (to, 1)) == -1)
1025 || (GET_CODE (to) == MINUS && XEXP (to, 1) == const1_rtx)))
1026 {
1027 if (!undobuf.storage)
1028 undobuf.storage = (char *) oballoc (0);
1029 return gen_rtx (NEG, GET_MODE (to), XEXP (to, 0));
1030 }
1031 /* Don't let substitution introduce double-negatives. */
1032 if (was_replaced[0]
1033 && GET_CODE (to) == code)
1034 return XEXP (to, 0);
1035 break;
1036
1037 case NEG:
1038 /* (neg (minus X Y)) can become (minus Y X). */
1039 if (was_replaced[0] && GET_CODE (to) == MINUS)
1040 {
1041 if (!undobuf.storage)
1042 undobuf.storage = (char *) oballoc (0);
1043 return gen_rtx (MINUS, GET_MODE (to),
1044 XEXP (to, 1), XEXP (to, 0));
1045 }
1046 /* Don't let substitution introduce double-negatives. */
1047 if (was_replaced[0]
1048 && GET_CODE (to) == code)
1049 return XEXP (to, 0);
1050 break;
1051
1052 case FLOAT_TRUNCATE:
1053 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
1054 if (was_replaced[0]
1055 && GET_CODE (to) == FLOAT_EXTEND
1056 && GET_MODE (XEXP (to, 0)) == GET_MODE (x))
1057 return XEXP (to, 0);
1058 break;
1059
1060#if 0
1061 case COMPARE:
1062 /* -x>0 if 0>x. */
1063 if (GET_CODE (XEXP (x, 0)) == NEG && XEXP (x, 1) == const0_rtx)
1064 {
1065 SUBST (XEXP (x, 1), XEXP (XEXP (x, 0), 0));
1066 SUBST (XEXP (x, 0), const0_rtx);
1067 }
1068 if (GET_CODE (XEXP (x, 1)) == NEG && XEXP (x, 0) == const0_rtx)
1069 {
1070 SUBST (XEXP (x, 0), XEXP (XEXP (x, 1), 0));
1071 SUBST (XEXP (x, 1), const0_rtx);
1072 }
1073 break;
1074#endif
1075
1076 case PLUS:
1077#if 0 /* Turned off for caution: turn it on after 1.36. */
1078 /* Identify constant sums as such. */
1079 if ((was_replaced[0] || was_replaced[1])
1080 && CONSTANT_P (XEXP (x, 0))
1081 && CONSTANT_P (XEXP (x, 1)))
1082 {
1083 if (!undobuf.storage)
1084 undobuf.storage = (char *) oballoc (0);
1085 return gen_rtx (CONST, GET_MODE (x), x);
1086 }
1087#endif
1088 /* In (plus <foo> (ashift <bar> <n>))
1089 change the shift to a multiply so we can recognize
1090 scaled indexed addresses. */
1091 if ((was_replaced[0]
1092 || was_replaced[1])
1093 && GET_CODE (to) == ASHIFT
1094 && GET_CODE (XEXP (to, 1)) == CONST_INT
1095 && INTVAL (XEXP (to, 1)) < HOST_BITS_PER_INT)
1096 {
1097 rtx temp;
1098 if (!undobuf.storage)
1099 undobuf.storage = (char *) oballoc (0);
1100 temp = gen_rtx (MULT, GET_MODE (to),
1101 XEXP (to, 0),
1102 gen_rtx (CONST_INT, VOIDmode,
1103 1 << INTVAL (XEXP (to, 1))));
1104 if (was_replaced[0])
1105 SUBST (XEXP (x, 0), temp);
1106 else
1107 SUBST (XEXP (x, 1), temp);
1108 }
1109 /* (plus X (neg Y)) becomes (minus X Y). */
1110 if (GET_CODE (XEXP (x, 1)) == NEG)
1111 {
1112 if (!undobuf.storage)
1113 undobuf.storage = (char *) oballoc (0);
1114 return gen_rtx (MINUS, GET_MODE (x),
1115 XEXP (x, 0), XEXP (XEXP (x, 1), 0));
1116 }
1117 /* (plus (neg X) Y) becomes (minus Y X). */
1118 if (GET_CODE (XEXP (x, 0)) == NEG)
1119 {
1120 if (!undobuf.storage)
1121 undobuf.storage = (char *) oballoc (0);
1122 return gen_rtx (MINUS, GET_MODE (x),
1123 XEXP (x, 1), XEXP (XEXP (x, 0), 0));
1124 }
1125 /* (plus (plus x c1) c2) => (plus x c1+c2) */
1126 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1127 && GET_CODE (XEXP (x, 0)) == PLUS
1128 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1129 {
1130 int sum = (INTVAL (XEXP (x, 1))
1131 + INTVAL (XEXP (XEXP (x, 0), 1)));
1132 if (sum == 0)
1133 return XEXP (XEXP (x, 0), 0);
1134 if (!undobuf.storage)
1135 undobuf.storage = (char *) oballoc (0);
1136 SUBST (XEXP (x, 1), gen_rtx (CONST_INT, VOIDmode, sum));
1137 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1138 break;
1139 }
1140 /* If we have something (a putative index) being added to a sum,
1141 reassociate so that any constant term is outermost,
1142 because that is the way indexed addresses are now
1143 supposed to appear. */
1144 if (((was_replaced[0] && GET_CODE (XEXP (x, 1)) == PLUS)
1145 || (was_replaced[1] && GET_CODE (XEXP (x, 0)) == PLUS))
1146 ||
1147 ((was_replaced[0] || was_replaced[1])
1148 && GET_CODE (to) == PLUS))
1149 {
1150 rtx offset = 0, base, index;
1151 if (GET_CODE (to) != PLUS)
1152 {
1153 index = to;
1154 base = was_replaced[0] ? XEXP (x, 1) : XEXP (x, 0);
1155 }
1156 else
1157 {
1158 index = was_replaced[0] ? XEXP (x, 1) : XEXP (x, 0);
1159 base = to;
1160 }
1161 if (CONSTANT_ADDRESS_P (XEXP (base, 0)))
1162 {
1163 offset = XEXP (base, 0);
1164 base = XEXP (base, 1);
1165 }
1166 else if (CONSTANT_ADDRESS_P (XEXP (base, 1)))
1167 {
1168 offset = XEXP (base, 1);
1169 base = XEXP (base, 0);
1170 }
1171 if (offset != 0)
1172 {
1173 if (!undobuf.storage)
1174 undobuf.storage = (char *) oballoc (0);
1175 if (GET_CODE (offset) == CONST_INT)
1176 return plus_constant (gen_rtx (PLUS, GET_MODE (index),
1177 base, index),
1178 INTVAL (offset));
1179 if (GET_CODE (index) == CONST_INT)
1180 return plus_constant (gen_rtx (PLUS, GET_MODE (offset),
1181 base, offset),
1182 INTVAL (index));
1183 return gen_rtx (PLUS, GET_MODE (index),
1184 gen_rtx (PLUS, GET_MODE (index),
1185 base, index),
1186 offset);
1187 }
1188 }
1189 break;
1190
1191 case EQ:
1192 case NE:
1193 /* If comparing a subreg against zero, discard the subreg. */
1194 if (was_replaced[0]
1195 && GET_CODE (to) == SUBREG
1196 && SUBREG_WORD (to) == 0
1197 && XEXP (x, 1) == const0_rtx)
1198 SUBST (XEXP (x, 0), SUBREG_REG (to));
1199
1200 /* If comparing a ZERO_EXTRACT against zero,
1201 canonicalize to a SIGN_EXTRACT,
1202 since the two are equivalent here. */
1203 if (was_replaced[0]
1204 && GET_CODE (to) == ZERO_EXTRACT
1205 && XEXP (x, 1) == const0_rtx)
1206 {
1207 if (!undobuf.storage)
1208 undobuf.storage = (char *) oballoc (0);
1209 SUBST (XEXP (x, 0),
1210 gen_rtx (SIGN_EXTRACT, GET_MODE (to),
1211 XEXP (to, 0), XEXP (to, 1),
1212 XEXP (to, 2)));
1213 }
1214#ifndef BITS_BIG_ENDIAN
1215 /* If we are putting (ASHIFT 1 x) into (EQ (AND ... y) 0),
1216 arrange to return (EQ (SIGN_EXTRACT y 1 x) 0),
1217 which is what jump-on-bit instructions are written with. */
1218 else if (XEXP (x, 1) == const0_rtx
1219 && GET_CODE (XEXP (x, 0)) == AND
1220 && (XEXP (XEXP (x, 0), 0) == to
1221 || XEXP (XEXP (x, 0), 1) == to)
1222 && GET_CODE (to) == ASHIFT
1223 && XEXP (to, 0) == const1_rtx)
1224 {
1225 register rtx y = XEXP (XEXP (x, 0),
1226 XEXP (XEXP (x, 0), 0) == to);
1227 if (!undobuf.storage)
1228 undobuf.storage = (char *) oballoc (0);
1229 SUBST (XEXP (x, 0),
1230 gen_rtx (SIGN_EXTRACT, GET_MODE (to),
1231 y,
1232 const1_rtx, XEXP (to, 1)));
1233 }
1234#endif /* not BITS_BIG_ENDIAN */
1235 /* Negation is a no-op before equality test against zero. */
1236 if (GET_CODE (XEXP (x, 0)) == NEG && XEXP (x, 1) == const0_rtx)
1237 {
1238 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1239 }
1240 if (GET_CODE (XEXP (x, 1)) == NEG && XEXP (x, 0) == const0_rtx)
1241 {
1242 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
1243 }
1244 break;
1245
1246 case ZERO_EXTEND:
1247 /* Nested zero-extends are equivalent to just one. */
1248 if (was_replaced[0]
1249 && GET_CODE (to) == ZERO_EXTEND)
1250 SUBST (XEXP (x, 0), XEXP (to, 0));
1251 /* Zero extending a constant int can be replaced
1252 by a zero-extended constant. */
1253 if (was_replaced[0]
1254 && HOST_BITS_PER_INT >= GET_MODE_BITSIZE (GET_MODE (from))
1255 && GET_CODE (to) == CONST_INT)
1256 {
1257 int intval = INTVAL (to) & GET_MODE_MASK (GET_MODE (from));
1258 if (!undobuf.storage)
1259 undobuf.storage = (char *) oballoc (0);
1260 return gen_rtx (CONST_INT, VOIDmode, intval);
1261 }
1262 /* Zero-extending the result of an and with a constant can be done
1263 with a wider and. */
1264 if (was_replaced[0]
1265 && GET_CODE (to) == AND
1266 && GET_CODE (XEXP (to, 1)) == CONST_INT
1267 && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0))
1268 /* Avoid getting wrong result if the constant has high bits set
1269 that are irrelevant in the narrow mode where it is being used. */
1270 && 0 == (INTVAL (XEXP (to, 1))
1271 & ~ GET_MODE_MASK (GET_MODE (to))))
1272 {
1273 if (!undobuf.storage)
1274 undobuf.storage = (char *) oballoc (0);
1275 return gen_rtx (AND, GET_MODE (x),
1276 gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
1277 XEXP (to, 1));
1278 }
1279 /* Change (zero_extend:M (subreg:N (zero_extract:M ...) 0))
1280 to (zero_extract:M ...) if the field extracted fits in mode N. */
1281 if (GET_CODE (XEXP (x, 0)) == SUBREG
1282 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTRACT
1283 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
1284 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
1285 <= GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
1286 {
1287 return XEXP (XEXP (x, 0), 0);
1288 }
1289 /* Change (zero_extend:M (subreg:N (and:M ... <const>) 0))
1290 to (and:M ...) if the significant bits fit in mode N. */
1291 if (GET_CODE (XEXP (x, 0)) == SUBREG
1292 && SUBREG_REG (XEXP (x, 0)) == to
1293 && SUBREG_WORD (XEXP (x, 0)) == 0
1294 && GET_CODE (to) == AND
1295 && GET_CODE (XEXP (to, 1)) == CONST_INT
1296 && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0))
1297 /* Avoid getting wrong result if the constant has high bits set
1298 that are irrelevant in the narrow mode where it is being used. */
1299 && 0 == (INTVAL (XEXP (to, 1))
1300 & ~ GET_MODE_MASK (GET_MODE (to))))
1301 {
1302 if (!undobuf.storage)
1303 undobuf.storage = (char *) oballoc (0);
1304 return gen_rtx (AND, GET_MODE (x),
1305 gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
1306 XEXP (to, 1));
1307 }
1308 /* In (zero_extend:M (subreg:N (lshiftrt:M REG))),
1309 where REG was assigned from (zero_extend:M (any:N ...)),
1310 remove the outer zero extension. */
1311 if (GET_CODE (XEXP (x, 0)) == SUBREG
1312 && SUBREG_REG (XEXP (x, 0)) == to
1313 && SUBREG_WORD (XEXP (x, 0)) == 0
1314 && GET_CODE (to) == LSHIFTRT)
1315 {
1316 rtx tmp = XEXP (to, 0);
1317
1318 /* See if arg of LSHIFTRT is a register whose value we can find. */
1319 if (GET_CODE (tmp) == REG)
1320 if (reg_n_sets[REGNO (tmp)] == 1
1321 && reg_last_set[REGNO (tmp)] != 0
1322 && SET_DEST (PATTERN (reg_last_set[REGNO (tmp)])) == tmp)
1323 tmp = SET_SRC (PATTERN (reg_last_set[REGNO (tmp)]));
1324 else
1325 break;
1326
1327 if (GET_CODE (tmp) == ZERO_EXTEND
1328 && GET_MODE (tmp) == GET_MODE (x)
1329 && GET_MODE (XEXP (tmp, 0)) == GET_MODE (XEXP (x, 0)))
1330 return SUBREG_REG (XEXP (x, 0));
1331 }
1332 break;
1333
1334 case SIGN_EXTEND:
1335 /* Nested sign-extends are equivalent to just one. */
1336 if (was_replaced[0]
1337 && GET_CODE (to) == SIGN_EXTEND)
1338 SUBST (XEXP (x, 0), XEXP (to, 0));
1339 /* Sign extending a constant int can be replaced
1340 by a sign-extended constant. */
1341 if (was_replaced[0]
1342 && HOST_BITS_PER_INT >= GET_MODE_BITSIZE (GET_MODE (from))
1343 && GET_CODE (to) == CONST_INT)
1344 {
1345 int intval = INTVAL (to);
1346 if (!undobuf.storage)
1347 undobuf.storage = (char *) oballoc (0);
1348 if (intval > 0
1349 && (intval & (1 << (GET_MODE_BITSIZE (GET_MODE (from)) - 1))))
1350 intval |= ~ GET_MODE_MASK (GET_MODE (from));
1351 return gen_rtx (CONST_INT, VOIDmode, intval);
1352 }
1353 /* Sign-extending the result of an and with a constant can be done
1354 with a wider and, provided the high bit of the constant is 0. */
1355 if (was_replaced[0]
1356 && GET_CODE (to) == AND
1357 && GET_CODE (XEXP (to, 1)) == CONST_INT
1358 && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0))
1359 && ((INTVAL (XEXP (to, 1))
1360 & (-1 << (GET_MODE_BITSIZE (GET_MODE (to)) - 1)))
1361 == 0))
1362 {
1363 if (!undobuf.storage)
1364 undobuf.storage = (char *) oballoc (0);
1365 return gen_rtx (AND, GET_MODE (x),
1366 gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
1367 XEXP (to, 1));
1368 }
1369 /* hacks added by tiemann. */
1370 /* Change (sign_extend:M (subreg:N (and:M ... <const>) 0))
1371 to (and:M ...), provided the result fits in mode N,
1372 and the high bit of the constant is 0 in mode N. */
1373 if (GET_CODE (XEXP (x, 0)) == SUBREG
1374 && SUBREG_REG (XEXP (x, 0)) == to
1375 && SUBREG_WORD (XEXP (x, 0)) == 0
1376 && GET_CODE (to) == AND
1377 && GET_CODE (XEXP (to, 1)) == CONST_INT
1378 && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0))
1379 && ((INTVAL (XEXP (to, 1))
1380 & (-1 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
1381 == 0))
1382 {
1383 if (!undobuf.storage)
1384 undobuf.storage = (char *) oballoc (0);
1385 return gen_rtx (AND, GET_MODE (x),
1386 gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
1387 XEXP (to, 1));
1388 }
1389 /* In (sign_extend:M (subreg:N (ashiftrt:M REG))),
1390 where REG was assigned from (sign_extend:M (any:N ...)),
1391 remove the outer sign extension. */
1392 if (GET_CODE (XEXP (x, 0)) == SUBREG
1393 && SUBREG_REG (XEXP (x, 0)) == to
1394 && SUBREG_WORD (XEXP (x, 0)) == 0
1395 && GET_CODE (to) == ASHIFTRT)
1396 {
1397 rtx tmp = XEXP (to, 0);
1398
1399 /* See if arg of ASHIFTRT is a register whose value we can find. */
1400 if (GET_CODE (tmp) == REG)
1401 if (reg_n_sets[REGNO (tmp)] == 1
1402 && reg_last_set[REGNO (tmp)] != 0
1403 && SET_DEST (PATTERN (reg_last_set[REGNO (tmp)])) == tmp)
1404 tmp = SET_SRC (PATTERN (reg_last_set[REGNO (tmp)]));
1405 else
1406 break;
1407
1408 if (GET_CODE (tmp) == SIGN_EXTEND
1409 && GET_MODE (tmp) == GET_MODE (x)
1410 && GET_MODE (XEXP (tmp, 0)) == GET_MODE (XEXP (x, 0)))
1411 return SUBREG_REG (XEXP (x, 0));
1412 }
1413 break;
1414
1415 case SET:
1416 /* In (set (zero-extract <x> <n> <y>) (and <foo> <(2**n-1) | anything>))
1417 the `and' can be deleted. This can happen when storing a bit
1418 that came from a set-flag insn followed by masking to one bit. */
1419 if (GET_CODE (XEXP (x, 0)) == ZERO_EXTRACT
1420 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1421 && was_replaced[1]
1422 && GET_CODE (to) == AND
1423 && GET_CODE (XEXP (to, 1)) == CONST_INT
1424 && 0 == (((1 << INTVAL (XEXP (XEXP (x, 0), 1))) - 1)
1425 & ~ INTVAL (XEXP (to, 1))))
1426 {
1427 SUBST (XEXP (x, 1), XEXP (to, 0));
1428 }
1429 /* In (set (zero-extract <x> <n> <y>)
1430 (subreg (and <foo> <(2**n-1) | anything>)))
1431 the `and' can be deleted. */
1432 if (GET_CODE (XEXP (x, 0)) == ZERO_EXTRACT
1433 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1434 && GET_CODE (XEXP (x, 1)) == SUBREG
1435 && SUBREG_WORD (XEXP (x, 1)) == 0
1436 && GET_CODE (SUBREG_REG (XEXP (x, 1))) == AND
1437 && GET_CODE (XEXP (SUBREG_REG (XEXP (x, 1)), 1)) == CONST_INT
1438 && 0 == (((1 << INTVAL (XEXP (XEXP (x, 0), 1))) - 1)
1439 & ~ INTVAL (XEXP (SUBREG_REG (XEXP (x, 1)), 1))))
1440 {
1441 SUBST (SUBREG_REG (XEXP (x, 1)), XEXP (SUBREG_REG (XEXP (x, 1)), 0));
1442 }
1443 /* (set (zero_extract ...) (and/or/xor (zero_extract ...) const)),
1444 if both zero_extracts have the same location, size and position,
1445 can be changed to avoid the byte extracts. */
1446 if ((GET_CODE (XEXP (x, 0)) == ZERO_EXTRACT
1447 || GET_CODE (XEXP (x, 0)) == SIGN_EXTRACT)
1448 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
1449 && (GET_CODE (XEXP (x, 1)) == AND
1450 || GET_CODE (XEXP (x, 1)) == IOR
1451 || GET_CODE (XEXP (x, 1)) == XOR)
1452 && rtx_equal_p (XEXP (x, 0), XEXP (XEXP (x, 1), 0))
1453 && GET_CODE (XEXP (XEXP (x, 1), 0)) == GET_CODE (XEXP (x, 0))
1454 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
1455 /* zero_extract can apply to a QImode even if the bits extracted
1456 don't fit inside that byte. In such a case, we may not do this
1457 optimization, since the OR or AND insn really would need
1458 to fit in a byte. */
1459 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 0), 2))
1460 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))))
1461 {
1462 int shiftcount;
1463 int newmask;
1464#ifdef BITS_BIG_ENDIAN
1465 shiftcount
1466 = GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
1467 - INTVAL (XEXP (XEXP (x, 0), 1)) - INTVAL (XEXP (XEXP (x, 0), 2));
1468#else
1469 shiftcount
1470 = INTVAL (XEXP (XEXP (x, 0), 2));
1471#endif
1472 newmask = ((INTVAL (XEXP (XEXP (x, 1), 1)) << shiftcount)
1473 + (GET_CODE (XEXP (x, 1)) == AND
1474 ? (1 << shiftcount) - 1
1475 : 0));
1476 if (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
1477 < HOST_BITS_PER_INT)
1478 newmask &= (1 << GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))) - 1;
1479 if (!undobuf.storage)
1480 undobuf.storage = (char *) oballoc (0);
1481 return
1482 gen_rtx (SET, VOIDmode,
1483 XEXP (XEXP (x, 0), 0),
1484 gen_rtx (GET_CODE (XEXP (x, 1)),
1485 GET_MODE (XEXP (XEXP (x, 0), 0)),
1486 XEXP (XEXP (XEXP (x, 1), 0), 0),
1487 gen_rtx (CONST_INT, VOIDmode, newmask)));
1488 }
1489 /* Can simplify (set (cc0) (compare (zero/sign_extend FOO) CONST))
1490 to (set (cc0) (compare FOO CONST)) if CONST fits in FOO's mode
1491 and we are only testing equality.
1492 In fact, this is valid for zero_extend if what follows is an
1493 unsigned comparison, and for sign_extend with a signed comparison. */
1494 if (SET_DEST (x) == cc0_rtx
1495 && GET_CODE (SET_SRC (x)) == COMPARE
1496 && (GET_CODE (XEXP (SET_SRC (x), 0)) == ZERO_EXTEND
1497 || GET_CODE (XEXP (SET_SRC (x), 0)) == SIGN_EXTEND)
1498 && next_insn_tests_no_inequality (subst_insn)
1499 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
1500 /* This is overly cautious by one bit, but saves worrying about
1501 whether it is zero-extension or sign extension. */
1502 && ((unsigned) INTVAL (XEXP (SET_SRC (x), 1))
1503 < (1 << (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))) - 1))))
1504 SUBST (XEXP (SET_SRC (x), 0), XEXP (XEXP (SET_SRC (x), 0), 0));
1505 break;
1506
1507 case AND:
1508 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
1509 {
1510 rtx tem = simplify_and_const_int (x, to);
1511 if (tem)
1512 return tem;
1513 }
1514 break;
1515
1516 case IOR:
1517 case XOR:
1518 /* (ior (ior x c1) c2) => (ior x c1|c2); likewise for xor. */
1519 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1520 && GET_CODE (XEXP (x, 0)) == code
1521 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
1522 {
1523 int c0 = INTVAL (XEXP (x, 1));
1524 int c1 = INTVAL (XEXP (XEXP (x, 0), 1));
1525 int combined = (code == IOR ? c0 | c1 : c0 ^ c1);
1526
1527 if (combined == 0)
1528 return XEXP (XEXP (x, 0), 0);
1529 if (!undobuf.storage)
1530 undobuf.storage = (char *) oballoc (0);
1531 SUBST (XEXP (x, 1), gen_rtx (CONST_INT, VOIDmode, combined));
1532 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1533 break;
1534 }
1535
1536 case FLOAT:
1537 /* (float (sign_extend <X>)) = (float <X>). */
1538 if (was_replaced[0]
1539 && GET_CODE (to) == SIGN_EXTEND)
1540 SUBST (XEXP (x, 0), XEXP (to, 0));
1541 break;
1542
1543 case ZERO_EXTRACT:
1544 /* (ZERO_EXTRACT (TRUNCATE x)...)
1545 can become (ZERO_EXTRACT x ...). */
1546 if (was_replaced[0]
1547 && GET_CODE (to) == TRUNCATE)
1548 {
1549#ifdef BITS_BIG_ENDIAN
1550 if (GET_CODE (XEXP (x, 2)) == CONST_INT)
1551 {
1552 if (!undobuf.storage)
1553 undobuf.storage = (char *) oballoc (0);
1554 /* On a big-endian machine, must increment the bit-number
1555 since sign bit is farther away in the pre-truncated value. */
1556 return gen_rtx (ZERO_EXTRACT, GET_MODE (x),
1557 XEXP (to, 0),
1558 XEXP (x, 1),
1559 gen_rtx (CONST_INT, VOIDmode,
1560 (INTVAL (XEXP (x, 2))
1561 + GET_MODE_BITSIZE (GET_MODE (XEXP (to, 0)))
1562 - GET_MODE_BITSIZE (GET_MODE (to)))));
1563 }
1564#else
1565 SUBST (XEXP (x, 0), XEXP (to, 0));
1566#endif
1567 }
1568 /* Extracting a single bit from the result of a shift:
1569 see which bit it was before the shift and extract that directly. */
1570 if (was_replaced[0]
1571 && (GET_CODE (to) == ASHIFTRT || GET_CODE (to) == LSHIFTRT
1572 || GET_CODE (to) == ASHIFT || GET_CODE (to) == LSHIFT)
1573 && GET_CODE (XEXP (to, 1)) == CONST_INT
1574 && XEXP (x, 1) == const1_rtx
1575 && GET_CODE (XEXP (x, 2)) == CONST_INT)
1576 {
1577 int shift = INTVAL (XEXP (to, 1));
1578 int newpos;
1579 if (GET_CODE (to) == ASHIFT || GET_CODE (to) == LSHIFT)
1580 shift = - shift;
1581#ifdef BITS_BIG_ENDIAN
1582 shift = - shift;
1583#endif
1584 newpos = INTVAL (XEXP (x, 2)) + shift;
1585 if (newpos >= 0 &&
1586 newpos < GET_MODE_BITSIZE (GET_MODE (to)))
1587 {
1588 if (!undobuf.storage)
1589 undobuf.storage = (char *) oballoc (0);
1590 return gen_rtx (ZERO_EXTRACT, GET_MODE (x),
1591 XEXP (to, 0), const1_rtx,
1592 gen_rtx (CONST_INT, VOIDmode, newpos));
1593 }
1594 }
1595 break;
1596
1597 case LSHIFTRT:
1598 case ASHIFTRT:
1599 case ROTATE:
1600 case ROTATERT:
1601#ifdef SHIFT_COUNT_TRUNCATED
1602 /* (lshift <X> (sign_extend <Y>)) = (lshift <X> <Y>) (most machines).
1603 True for all kinds of shifts and also for zero_extend. */
1604 if (was_replaced[1]
1605 && (GET_CODE (to) == SIGN_EXTEND
1606 || GET_CODE (to) == ZERO_EXTEND)
1607 && FAKE_EXTEND_SAFE_P (GET_MODE (to), XEXP (to, 0)))
1608 {
1609 if (!undobuf.storage)
1610 undobuf.storage = (char *) oballoc (0);
1611 SUBST (XEXP (x, 1),
1612 /* This is a perverse SUBREG, wider than its base. */
1613 gen_lowpart_for_combine (GET_MODE (to), XEXP (to, 0)));
1614 }
1615#endif
1616 /* Two shifts in a row of same kind
1617 in same direction with constant counts
1618 may be combined. */
      if (was_replaced[0]
	  && GET_CODE (to) == GET_CODE (x)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (to, 1)) == CONST_INT
	  && INTVAL (XEXP (to, 1)) > 0
	  && INTVAL (XEXP (x, 1)) > 0
	  && (INTVAL (XEXP (x, 1)) + INTVAL (XEXP (to, 1))
	      < GET_MODE_BITSIZE (GET_MODE (x))))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  return gen_rtx (GET_CODE (x), GET_MODE (x),
			  XEXP (to, 0),
			  gen_rtx (CONST_INT, VOIDmode,
				   INTVAL (XEXP (x, 1))
				   + INTVAL (XEXP (to, 1))));
	}
      break;

    case LSHIFT:
    case ASHIFT:
#ifdef SHIFT_COUNT_TRUNCATED
      /* (lshift <X> (sign_extend <Y>)) = (lshift <X> <Y>) (most machines).
	 True for all kinds of shifts and also for zero_extend.  */
      if (was_replaced[1]
	  && (GET_CODE (to) == SIGN_EXTEND
	      || GET_CODE (to) == ZERO_EXTEND)
	  && FAKE_EXTEND_SAFE_P (GET_MODE (to), XEXP (to, 0)))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  SUBST (XEXP (x, 1),
		 /* This is a perverse SUBREG, wider than its base.  */
		 gen_lowpart_for_combine (GET_MODE (to), XEXP (to, 0)));
	}
#endif
      /* (lshift (and (lshiftrt <foo> <X>) <Y>) <X>)
	 happens when copying between bit-fields in similar structures.
	 It can be replaced by a single `and' instruction.
	 It does not matter whether the shifts are logical or arithmetic.  */
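      /* Editor's worked example (illustrative): with 32-bit SImode,
	 (ashift (and (lshiftrt FOO (const_int 8)) (const_int 255)) (const_int 8))
	 extracts bits 8..15 of FOO and puts them back in the same place,
	 so it is equivalent to (and FOO (const_int 65280)); the new
	 constant is 255 << 8 = 0xff00, masked to the mode.  */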
      if (GET_CODE (XEXP (x, 0)) == AND
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && XEXP (XEXP (x, 0), 0) == to
	  && (GET_CODE (to) == LSHIFTRT
	      || GET_CODE (to) == ASHIFTRT)
#if 0
/* I now believe this restriction is unnecessary.
   The outer shift will discard those bits in any case, right?  */

	  /* If inner shift is arithmetic, either it shifts left or
	     the bits it shifts the sign into are zeroed by the and.  */
	  && (INTVAL (XEXP (x, 1)) < 0
	      || ((unsigned) INTVAL (XEXP (XEXP (x, 0), 1))
		  < 1 << (GET_MODE_BITSIZE (GET_MODE (x))
			  - INTVAL (XEXP (x, 0)))))
#endif
	  && GET_CODE (XEXP (to, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) == INTVAL (XEXP (to, 1)))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  /* The constant in the new `and' is <Y> << <X>
	     but clear out all bits that don't belong in our mode.  */
	  return gen_rtx (AND, GET_MODE (x), XEXP (to, 0),
			  gen_rtx (CONST_INT, VOIDmode,
				   (GET_MODE_MASK (GET_MODE (x))
				    & ((GET_MODE_MASK (GET_MODE (x))
					& INTVAL (XEXP (XEXP (x, 0), 1)))
				       << INTVAL (XEXP (x, 1))))));
	}
      /* Two shifts in a row in same direction with constant counts
	 may be combined.  */
      if (was_replaced[0]
	  && (GET_CODE (to) == ASHIFT || GET_CODE (to) == LSHIFT)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (to, 1)) == CONST_INT
	  && INTVAL (XEXP (to, 1)) > 0
	  && INTVAL (XEXP (x, 1)) > 0
	  && (INTVAL (XEXP (x, 1)) + INTVAL (XEXP (to, 1))
	      < GET_MODE_BITSIZE (GET_MODE (x))))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  return gen_rtx (GET_CODE (x), GET_MODE (x),
			  XEXP (to, 0),
			  gen_rtx (CONST_INT, VOIDmode,
				   INTVAL (XEXP (x, 1))
				   + INTVAL (XEXP (to, 1))));
	}
      /* (ashift (ashiftrt <foo> <X>) <X>)
	 (or, on some machines, (ashift (ashift <foo> <-X>) <X>) instead)
	 happens if you divide by 2**N and then multiply by 2**N.
	 It can be replaced by one `and' instruction.
	 It does not matter whether the shifts are logical or arithmetic.  */
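      /* Editor's worked example (illustrative): dividing and then
	 multiplying by 16 in 32-bit SImode,
	 (ashift (ashiftrt FOO (const_int 4)) (const_int 4)),
	 just clears the low four bits, so it becomes
	 (and FOO (const_int -16)), i.e. a mask of 0xfffffff0.  */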
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0
	  && was_replaced[0]
	  && (((GET_CODE (to) == LSHIFTRT || GET_CODE (to) == ASHIFTRT)
	       && GET_CODE (XEXP (to, 1)) == CONST_INT
	       && INTVAL (XEXP (x, 1)) == INTVAL (XEXP (to, 1)))
	      ||
	      ((GET_CODE (to) == LSHIFT || GET_CODE (to) == ASHIFT)
	       && GET_CODE (XEXP (to, 1)) == CONST_INT
	       && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (to, 1)))))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  /* The constant in the new `and' is -1 << <X>
	     but clear out all bits that don't belong in our mode.  */
	  return gen_rtx (AND, GET_MODE (x), XEXP (to, 0),
			  gen_rtx (CONST_INT, VOIDmode,
				   (GET_MODE_MASK (GET_MODE (x))
				    & (GET_MODE_MASK (GET_MODE (x))
				       << INTVAL (XEXP (x, 1))))));
	}

    }

  return x;
}

/* This is the AND case of the function subst.  */

static rtx
simplify_and_const_int (x, to)
     rtx x, to;
{
  register rtx varop = XEXP (x, 0);
  register int constop = INTVAL (XEXP (x, 1));

  /* (and (subreg (and <foo> <constant>) 0) <constant>)
     results from an andsi followed by an andqi,
     which happens frequently when storing bit-fields
     on something whose result comes from an andsi.  */
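  /* Editor's worked example (illustrative): if (reg:SI 64) was set by
     (and:SI (reg:SI 60) (const_int 511)), then
     (and:QI (subreg:QI (reg:SI 64) 0) (const_int 15)) simplifies to
     (and:QI (subreg:QI (reg:SI 60) 0) (const_int 15)),
     because 15 & 511 == 15 and QImode is narrower than SImode.  */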
  if (GET_CODE (varop) == SUBREG
      && XEXP (varop, 0) == to
      && subreg_lowpart_p (varop)
      && GET_CODE (to) == AND
      && GET_CODE (XEXP (to, 1)) == CONST_INT
      /* Verify that the result of the outer `and'
	 is not affected by any bits not defined in the inner `and'.
	 True if the outer mode is narrower, or if the outer constant
	 masks to zero all the bits that the inner mode doesn't have.  */
      && (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (GET_MODE (to))
	  || (constop & ~ GET_MODE_MASK (GET_MODE (to))) == 0))
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (AND, GET_MODE (x),
		      gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
		      gen_rtx (CONST_INT, VOIDmode,
			       constop
			       /* Remember that the bits outside that mode
				  are not being changed, so the effect
				  is as if they were all 1.  */
			       & INTVAL (XEXP (to, 1))));
    }
  /* (and:SI (zero_extract:SI ...) <constant>)
     results from an andsi following a byte-fetch on RISC machines.
     When the constant includes all bits extracted, eliminate the `and'.  */
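  /* Editor's worked example (illustrative): a byte fetch such as
     (zero_extract:SI (mem:QI ...) (const_int 8) (const_int 0)) can only
     produce values 0..255, so a following (and:SI ... (const_int 255))
     changes nothing and the extract can be used by itself.  */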
  if (GET_CODE (varop) == ZERO_EXTRACT
      && GET_CODE (XEXP (varop, 1)) == CONST_INT
      /* The `and' must not clear any bits that the extract can give.  */
      && (~ constop & ((1 << INTVAL (XEXP (varop, 1))) - 1)) == 0)
    return varop;
  /* (and (zero_extend <foo>) <constant>)
     often results from storing in a bit-field something
     that was calculated as a short.  Replace with a single `and'
     in whose constant all bits not in <foo>'s mode are zero.  */
  if (varop == to
      && GET_CODE (to) == ZERO_EXTEND
      && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0)))
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (AND, GET_MODE (x),
		      /* This is a perverse SUBREG, wider than its base.  */
		      gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
		      gen_rtx (CONST_INT, VOIDmode,
			       constop & GET_MODE_MASK (GET_MODE (XEXP (to, 0)))));
    }
  /* (and (sign_extend <foo>) <constant>)
     can be replaced with (and (subreg <foo>) <constant>)
     if <constant> is narrower than <foo>'s mode,
     or with (zero_extend <foo>) if <constant> is a mask for that mode.  */
  if (varop == to
      && GET_CODE (to) == SIGN_EXTEND
      && ((unsigned) constop <= GET_MODE_MASK (GET_MODE (XEXP (to, 0))))
      && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0)))
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      if (constop == GET_MODE_MASK (GET_MODE (XEXP (to, 0))))
	return gen_rtx (ZERO_EXTEND, GET_MODE (x), XEXP (to, 0));
      return gen_rtx (AND, GET_MODE (x),
		      /* This is a perverse SUBREG, wider than its base.  */
		      gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
		      XEXP (x, 1));
    }
  /* (and (and <foo> <constant>) <constant>)
     comes from two `and' instructions in a row.  */
  if (varop == to
      && GET_CODE (to) == AND
      && GET_CODE (XEXP (to, 1)) == CONST_INT)
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (AND, GET_MODE (x),
		      XEXP (to, 0),
		      gen_rtx (CONST_INT, VOIDmode,
			       constop
			       & INTVAL (XEXP (to, 1))));
    }
  /* (and (ashiftrt (ashift FOO N) N) CONST)
     may be simplified to (and FOO CONST) if CONST masks off the bits
     changed by the two shifts.  */
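  /* Editor's worked example (illustrative): in 32-bit SImode,
     (and (ashiftrt (ashift FOO (const_int 24)) (const_int 24)) (const_int 255))
     sign-extends the low byte of FOO and then masks back to that byte,
     so only the original low byte survives; since the mask is 0xff,
     the result is (zero_extend:SI (subreg:QI FOO 0)).  */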
  if (GET_CODE (varop) == ASHIFTRT
      && GET_CODE (XEXP (varop, 1)) == CONST_INT
      && XEXP (varop, 0) == to
      && GET_CODE (to) == ASHIFT
      && GET_CODE (XEXP (to, 1)) == CONST_INT
      && INTVAL (XEXP (varop, 1)) == INTVAL (XEXP (to, 1))
      && ((unsigned) constop >> INTVAL (XEXP (varop, 1))) == 0)
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      /* If CONST is a mask for the low byte,
	 change this into a zero-extend instruction
	 from just the low byte of FOO.  */
      if (constop == GET_MODE_MASK (QImode))
	{
	  rtx temp = gen_lowpart_for_combine (QImode, XEXP (to, 0));
	  if (GET_CODE (temp) != CLOBBER)
	    return gen_rtx (ZERO_EXTEND, GET_MODE (x), temp);
	}
      return gen_rtx (AND, GET_MODE (x),
		      XEXP (to, 0), XEXP (x, 1));
    }
  /* (and (ashiftrt (zero_extend FOO) N) CONST)
     may be simplified to (and (ashiftrt (subreg FOO) N) CONST)
     if CONST masks off the bits changed by extension.  */
  if ((GET_CODE (varop) == ASHIFTRT || GET_CODE (varop) == LSHIFTRT)
      && GET_CODE (XEXP (varop, 1)) == CONST_INT
      && XEXP (varop, 0) == to
      && (GET_CODE (to) == ZERO_EXTEND || GET_CODE (to) == SIGN_EXTEND)
      /* Verify the `and' discards all the extended bits.  */
      && (((unsigned) constop << INTVAL (XEXP (varop, 1)))
	  >> GET_MODE_BITSIZE (GET_MODE (XEXP (to, 0)))) == 0
      && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0)))
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      SUBST (XEXP (varop, 0),
	     gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)));
      return x;
    }
  /* (and x const) may be converted to (zero_extend (subreg x 0)).  */
  if (constop == GET_MODE_MASK (QImode)
      && GET_CODE (varop) == REG)
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (ZERO_EXTEND, GET_MODE (x),
		      gen_rtx (SUBREG, QImode, varop, 0));
    }
  if (constop == GET_MODE_MASK (HImode)
      && GET_CODE (varop) == REG)
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (ZERO_EXTEND, GET_MODE (x),
		      gen_rtx (SUBREG, HImode, varop, 0));
    }
  /* No simplification applies.  */
  return 0;
}

/* Like gen_lowpart but for use by combine.  In combine it is not possible
   to create any new pseudoregs.  However, it is safe to create
   invalid memory addresses, because combine will try to recognize
   them and all they will do is make the combine attempt fail.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

#undef gen_lowpart

static rtx
gen_lowpart_for_combine (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG)
    return gen_lowpart (mode, x);
  if (GET_MODE (x) == mode)
    return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
  if (GET_CODE (x) == MEM)
    {
      register int offset = 0;

      /* Refuse to work on a volatile memory ref.  */
      if (MEM_VOLATILE_P (x))
	return gen_rtx (CLOBBER, VOIDmode, const0_rtx);

      /* If we want to refer to something bigger than the original memref,
	 generate a perverse subreg instead.  That will force a reload
	 of the original memref X.  */
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
	return gen_rtx (SUBREG, mode, x, 0);

#ifdef WORDS_BIG_ENDIAN
      offset = (max (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		- max (GET_MODE_SIZE (mode), UNITS_PER_WORD));
#endif
#ifdef BYTES_BIG_ENDIAN
      /* Adjust the address so that the address-after-the-data
	 is unchanged.  */
      offset -= (min (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		 - min (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
#endif
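      /* Editor's worked example (illustrative): taking the QImode lowpart
	 of a 4-byte SImode memref on a byte- and word-big-endian machine
	 with UNITS_PER_WORD == 4: the WORDS term gives 4 - 4 = 0 and the
	 BYTES term subtracts 1 - 4 = -3, so offset = 3, the address of
	 the least significant byte.  */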
      return gen_rtx (MEM, mode, plus_constant (XEXP (x, 0),
						offset));
    }
  else
    return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
}
\f
/* After substitution, if the resulting pattern looks like
   (set (cc0) (and ...)) or (set (cc0) (lshiftrt ...)),
   this function is called to simplify the
   pattern into a bit-field operation if possible.  */

static void
simplify_set_cc0_and (insn)
     rtx insn;
{
  register rtx value = XEXP (PATTERN (insn), 1);
  register rtx op0 = XEXP (value, 0);
  register rtx op1 = XEXP (value, 1);
  int offset = 0;
  rtx var = 0;
  rtx bitnum = 0;
  int temp;
  int unit;
  rtx newpat;

  if (GET_CODE (value) == AND)
    {
      op0 = XEXP (value, 0);
      op1 = XEXP (value, 1);
    }
  else if (GET_CODE (value) == LSHIFTRT)
    {
      /* If there is no AND, but there is a shift that discards
	 all but the sign bit, we can pretend that the shift result
	 is ANDed with 1.  Otherwise we cannot handle just a shift.  */
      if (GET_CODE (XEXP (value, 1)) == CONST_INT
	  && (INTVAL (XEXP (value, 1))
	      == GET_MODE_BITSIZE (GET_MODE (value)) - 1))
	{
	  op0 = value;
	  op1 = const1_rtx;
	}
      else
	return;
    }
  else
    abort ();

  /* Look for a constant power of 2 or a shifted 1
     on either side of the AND.  Set VAR to the other side.
     Set BITNUM to the shift count of the 1 (as an rtx).
     Or, if bit number is constant, set OFFSET to the bit number.  */
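  /* Editor's worked example (illustrative): a test such as
     (set (cc0) (and:SI (reg:SI 70) (const_int 8))) tests bit 3 of
     (reg:SI 70), so the whole thing can be rewritten as
     (set (cc0) (zero_extract (reg:SI 70) (const_int 1) (const_int 3)))
     when the target recognizes such a bit-test pattern.  */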

  switch (GET_CODE (op0))
    {
    case CONST_INT:
      temp = exact_log2 (INTVAL (op0));
      if (temp < 0)
	return;
      offset = temp;
      var = op1;
      break;

    case ASHIFT:
    case LSHIFT:
      if (XEXP (op0, 0) == const1_rtx)
	{
	  bitnum = XEXP (op0, 1);
	  var = op1;
	}
    }
  if (var == 0)
    switch (GET_CODE (op1))
      {
      case CONST_INT:
	temp = exact_log2 (INTVAL (op1));
	if (temp < 0)
	  return;
	offset = temp;
	var = op0;
	break;

      case ASHIFT:
      case LSHIFT:
	if (XEXP (op1, 0) == const1_rtx)
	  {
	    bitnum = XEXP (op1, 1);
	    var = op0;
	  }
      }

  /* If VAR is 0, we didn't find something recognizable.  */
  if (var == 0)
    return;

  if (!undobuf.storage)
    undobuf.storage = (char *) oballoc (0);

  /* If the bit position is currently exactly 0,
     extract a right-shift from the variable portion.  */
  if (offset == 0
      && (GET_CODE (var) == ASHIFTRT || GET_CODE (var) == LSHIFTRT))
    {
      bitnum = XEXP (var, 1);
      var = XEXP (var, 0);
    }

  if (GET_CODE (var) == SUBREG && SUBREG_WORD (var) == 0)
    var = SUBREG_REG (var);

  /* Note that BITNUM and OFFSET are always little-endian through here,
     even on a big-endian machine.  */

#ifdef BITS_BIG_ENDIAN
  unit = GET_MODE_BITSIZE (GET_MODE (var)) - 1;

  if (bitnum != 0)
    bitnum = gen_rtx (MINUS, SImode,
		      gen_rtx (CONST_INT, VOIDmode, unit), bitnum);
  else
    offset = unit - offset;
#endif

  if (bitnum == 0)
    bitnum = gen_rtx (CONST_INT, VOIDmode, offset);

  newpat = gen_rtx (SET, VOIDmode, cc0_rtx,
		    gen_rtx (ZERO_EXTRACT, VOIDmode, var, const1_rtx, bitnum));
  if (recog (newpat, insn) >= 0)
    {
      if (undobuf.num_undo < MAX_UNDO)
	{
	  undobuf.undo[undobuf.num_undo].where = &XEXP (PATTERN (insn), 1);
	  undobuf.undo[undobuf.num_undo].old_contents = value;
	  XEXP (PATTERN (insn), 1) = XEXP (newpat, 1);
	}
      undobuf.num_undo++;
    }
}
\f
/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_last_set, reg_last_death, and also the similar information
   mem_last_set (which insn most recently modified memory)
   and last_call_cuid (which insn was the most recent subroutine call).  */

static void
record_dead_and_set_regs (insn)
     rtx insn;
{
  register rtx link;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD)
	reg_last_death[REGNO (XEXP (link, 0))] = insn;
      else if (REG_NOTE_KIND (link) == REG_INC)
	reg_last_set[REGNO (XEXP (link, 0))] = insn;
    }

  if (GET_CODE (insn) == CALL_INSN)
    last_call_cuid = mem_last_set = INSN_CUID (insn);

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      register int i;
      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
	{
	  register rtx elt = XVECEXP (PATTERN (insn), 0, i);
	  register enum rtx_code code = GET_CODE (elt);
	  if (code == SET || code == CLOBBER)
	    {
	      rtx dest = XEXP (elt, 0);
	      while (GET_CODE (dest) == SUBREG
		     || GET_CODE (dest) == STRICT_LOW_PART
		     || GET_CODE (dest) == SIGN_EXTRACT
		     || GET_CODE (dest) == ZERO_EXTRACT)
		dest = XEXP (dest, 0);

	      if (GET_CODE (dest) == REG)
		reg_last_set[REGNO (dest)] = insn;
	      else if (GET_CODE (dest) == MEM)
		mem_last_set = INSN_CUID (insn);
	    }
	}
    }
  else if (GET_CODE (PATTERN (insn)) == SET
	   || GET_CODE (PATTERN (insn)) == CLOBBER)
    {
      register rtx dest = XEXP (PATTERN (insn), 0);

      while (GET_CODE (dest) == SUBREG
	     || GET_CODE (dest) == STRICT_LOW_PART
	     || GET_CODE (dest) == SIGN_EXTRACT
	     || GET_CODE (dest) == ZERO_EXTRACT)
	dest = XEXP (dest, 0);

      if (GET_CODE (dest) == REG)
	reg_last_set[REGNO (dest)] = insn;
      else if (GET_CODE (dest) == MEM)
	mem_last_set = INSN_CUID (insn);
    }
}

/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_CUID.  */
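/* Editor's note (illustrative): this is the safety check that keeps a
   combination from changing a value.  For example, if we contemplate
   substituting (plus (reg 5) (const_int 1)) forward into a later insn,
   but some insn in between assigns to (reg 5), the substituted
   expression would read the new value instead of the old one, so the
   combination must be rejected.  */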

static int
use_crosses_set_p (x, from_cuid)
     register rtx x;
     int from_cuid;
{
  register char *fmt;
  register int i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
	 because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM)
	return 1;
#endif
      return (reg_last_set[regno]
	      && INSN_CUID (reg_last_set[regno]) > from_cuid);
    }

  if (code == MEM && mem_last_set > from_cuid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
	      return 1;
	}
      else if (fmt[i] == 'e'
	       && use_crosses_set_p (XEXP (x, i), from_cuid))
	return 1;
    }
  return 0;
}
\f
/* Return nonzero if reg REGNO is marked as dying in INSN.  */

int
regno_dead_p (regno, insn)
     int regno;
     rtx insn;
{
  register rtx link;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if ((REG_NOTE_KIND (link) == REG_DEAD
	 || REG_NOTE_KIND (link) == REG_INC)
	&& REGNO (XEXP (link, 0)) == regno)
      return 1;

  return 0;
}

/* Return nonzero if J is the first insn following I,
   not counting labels, line numbers, etc.
   We assume that J follows I.  */

static int
adjacent_insns_p (i, j)
     rtx i, j;
{
  register rtx insn;
  for (insn = NEXT_INSN (i); insn != j; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	|| GET_CODE (insn) == CALL_INSN
	|| GET_CODE (insn) == JUMP_INSN)
      return 0;
  return 1;
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

static int
check_asm_operands (x)
     rtx x;
{
  int noperands = asm_noperands (x);
  rtx *operands;
  int i;

  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  decode_asm_operands (x, operands, 0, 0, 0);

  for (i = 0; i < noperands; i++)
    if (!general_operand (operands[i], VOIDmode))
      return 0;

  return 1;
}
\f
/* Concatenate the list of logical links of OINSN
   into INSN's list of logical links.
   Modifies OINSN destructively.

   If ALL_LINKS is nonzero, move all the links that OINSN has.
   Otherwise, move only those that point to insns that set regs
   that die in the insn OINSN.
   Other links are clobbered so that they are no longer effective.  */

static void
add_links (insn, oinsn, all_links)
     rtx insn, oinsn;
     int all_links;
{
  register rtx links = LOG_LINKS (oinsn);
  if (! all_links)
    {
      rtx tail;
      for (tail = links; tail; tail = XEXP (tail, 1))
	{
	  rtx target = XEXP (tail, 0);
	  if (GET_CODE (target) != INSN
	      || GET_CODE (PATTERN (target)) != SET
	      || GET_CODE (SET_DEST (PATTERN (target))) != REG
	      || ! dead_or_set_p (oinsn, SET_DEST (PATTERN (target))))
	    /* OINSN is going to become a NOTE,
	       so a link pointing there will have no effect.  */
	    XEXP (tail, 0) = oinsn;
	}
    }
  if (LOG_LINKS (insn) == 0)
    LOG_LINKS (insn) = links;
  else
    {
      register rtx next, prev = LOG_LINKS (insn);
      while ((next = XEXP (prev, 1)) != 0)
	prev = next;
      XEXP (prev, 1) = links;
    }
}

/* Delete any LOG_LINKS of INSN which point at OINSN.  */

static void
remove_links (insn, oinsn)
     rtx insn, oinsn;
{
  register rtx next = LOG_LINKS (insn), prev = 0;
  while (next)
    {
      if (XEXP (next, 0) == oinsn)
	{
	  if (prev)
	    XEXP (prev, 1) = XEXP (next, 1);
	  else
	    LOG_LINKS (insn) = XEXP (next, 1);
	}
      else
	prev = next;
      next = XEXP (next, 1);
    }
}

/* Concatenate any elements of the list of reg-notes INCS
   which are of type REG_INC
   into INSN's list of reg-notes.  */

static void
add_incs (insn, incs)
     rtx insn, incs;
{
  register rtx tail;

  for (tail = incs; tail; tail = XEXP (tail, 1))
    if (REG_NOTE_KIND (tail) == REG_INC)
      REG_NOTES (insn)
	= gen_rtx (EXPR_LIST, REG_INC, XEXP (tail, 0), REG_NOTES (insn));
}
\f
/* Remove register number REGNO from the dead registers list of INSN.  */

void
remove_death (regno, insn)
     int regno;
     rtx insn;
{
  register rtx link, next;
  while ((link = REG_NOTES (insn))
	 && REG_NOTE_KIND (link) == REG_DEAD
	 && REGNO (XEXP (link, 0)) == regno)
    REG_NOTES (insn) = XEXP (link, 1);

  if (link)
    while ((next = XEXP (link, 1)) != 0)
      {
	if (REG_NOTE_KIND (next) == REG_DEAD
	    && REGNO (XEXP (next, 0)) == regno)
	  XEXP (link, 1) = XEXP (next, 1);
	else
	  link = next;
      }
}

/* For each register (hardware or pseudo) used within expression X,
   if its death is in an instruction with cuid
   between FROM_CUID (inclusive) and TO_INSN (exclusive),
   mark it as dead in TO_INSN instead.

   This is done when X is being merged by combination into TO_INSN.  */
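/* Editor's note (illustrative): if (reg 7) carries a REG_DEAD note on an
   insn between the insns being combined, and the combined insn TO_INSN is
   now the last real use, the note must be moved to TO_INSN so that later
   passes (e.g. register allocation) still see an accurate lifetime.  */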

static void
move_deaths (x, from_cuid, to_insn)
     rtx x;
     int from_cuid;
     rtx to_insn;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register rtx where_dead = reg_last_death[REGNO (x)];

      if (where_dead && INSN_CUID (where_dead) >= from_cuid
	  && INSN_CUID (where_dead) < INSN_CUID (to_insn))
	{
	  remove_death (REGNO (x), reg_last_death[REGNO (x)]);
	  if (! dead_or_set_p (to_insn, x))
	    REG_NOTES (to_insn)
	      = gen_rtx (EXPR_LIST, REG_DEAD, x, REG_NOTES (to_insn));
	}
      return;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), from_cuid, to_insn);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), from_cuid, to_insn);
    }
}
\f
/* Like move_deaths, but deaths are moving both forward
   (from FROM_CUID to TO_INSN), and backwards
   (from FROM_INSN to TO_INSN).  This is what happens
   when an insn is removed after applying the distributive law.  */

static void
move_deaths_2 (x, from_cuid, from_insn, to_insn)
     rtx x;
     int from_cuid;
     rtx from_insn, to_insn;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register rtx where_dead = reg_last_death[REGNO (x)];

      if (where_dead && INSN_CUID (where_dead) >= from_cuid
	  && INSN_CUID (where_dead) < INSN_CUID (to_insn))
	{
	  remove_death (REGNO (x), reg_last_death[REGNO (x)]);
	  if (! dead_or_set_p (to_insn, x))
	    REG_NOTES (to_insn)
	      = gen_rtx (EXPR_LIST, REG_DEAD, x, REG_NOTES (to_insn));
	}
      /* Can't use where_dead for from_insn because it has
	 not been computed yet.  */
      else if (dead_or_set_p (from_insn, x))
	{
	  remove_death (REGNO (x), from_insn);
	  if (! dead_or_set_p (to_insn, x))
	    REG_NOTES (to_insn)
	      = gen_rtx (EXPR_LIST, REG_DEAD, x, REG_NOTES (to_insn));
	}
      return;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths_2 (XVECEXP (x, i, j), from_cuid, from_insn, to_insn);
	}
      else if (fmt[i] == 'e')
	move_deaths_2 (XEXP (x, i), from_cuid, from_insn, to_insn);
    }
}
\f
/* The distrib combiner rewrites groups of insns so that optimizations
   can be more easily recognized.  The front end does not know how to
   group certain kinds of operations for efficient execution, and the
   resulting code can be quite poor.  For example, on a machine without
   bit-field instructions, bit-field references look like

	(and (lshiftrt ... n) m)

   When combining two bit-field operations, such as with ||, this can
   yield code like

	(set z
	     (or (and (lshiftrt x n) 1)
		 (and (lshiftrt y n) 1)))

   which can be more efficiently executed as

	(set z
	     (lshiftrt (and (or x y)
			    (1 << n)) n))

   From there, the combiner attempts to rewrite the insns,
   keeping flow information accurate for later passes,
   and reducing the total number of insns executed.

   This function returns the point at which we should try
   looking for more simplifications.  This will be before
   INSN if the call succeeds.  We do not need to fear
   infinite loops, since this function is guaranteed to
   eliminate at least one (non-note) instruction if it returns
   successfully.  */

static rtx
try_distrib (insn, xprev1, xprev2)
     rtx insn, xprev1, xprev2;
{
  rtx pat = PATTERN (insn);
  rtx prev1, prev2, pat1, pat2, src1, src2;
  rtx to_prev, to_insn;
  enum rtx_code code;
  int insn_code_number, prev_code_number, regno;
  rtx new_insn_pat, new_prev_pat;

  distrib_attempts++;

  /* ??? Need to implement a test that PREV2 and PREV1
     are completely independent.  Right now their
     recognition ability is sufficiently limited that
     it should not be necessary, but better safe than sorry.  */

  /* Let PREV1 be the later of the two insns, and PREV2 the earlier.  */
  if (INSN_CUID (xprev1) > INSN_CUID (xprev2))
    {
      prev1 = xprev1;
      prev2 = xprev2;
    }
  else
    {
      prev1 = xprev2;
      prev2 = xprev1;
    }

  pat1 = PATTERN (prev1);
  pat2 = PATTERN (prev2);

  /* First, see if INSN, PREV1, and PREV2 have patterns we can expect
     to simplify.  */

  if (GET_CODE (pat) != SET
      || GET_CODE (pat1) != SET
      || GET_CODE (pat2) != SET)
    return 0;

  code = GET_CODE (SET_SRC (pat));
  src1 = SET_SRC (pat1);
  src2 = SET_SRC (pat2);

  if (GET_CODE (SET_DEST (pat1)) != REG
      || GET_CODE (SET_DEST (pat2)) != REG)
    return 0;

  switch (code)
    {
    default:
      return 0;

    case IOR:
    case AND:
    case XOR:
    case PLUS:
      ;
    }

  /* Insns PREV1 and PREV2 must provide the two operands of the arithmetic
     that is done in INSN.  */
  if (! ((XEXP (SET_SRC (pat), 0) == SET_DEST (pat1)
	  && XEXP (SET_SRC (pat), 1) == SET_DEST (pat2))
	 ||
	 (XEXP (SET_SRC (pat), 0) == SET_DEST (pat2)
	  && XEXP (SET_SRC (pat), 1) == SET_DEST (pat1))))
    return 0;

  /* They must not be used in any other way in INSN.
     In particular, they must not be used in a result memory address.  */
  if (reg_mentioned_p (SET_DEST (pat1), SET_DEST (pat))
      || reg_mentioned_p (SET_DEST (pat2), SET_DEST (pat)))
    return 0;

  /* Give up if the two operands' modes don't match.  */
  if (GET_MODE (src1) != GET_MODE (src2))
    return 0;

  /* PREV1 and PREV2 must compute the same operation.
     Actually, there are other cases that could be handled,
     but are not implemented.  For example:

	(set (reg:SI 94)
	     (and:SI (reg:SI 73)
		     (const_int 223)))

	(set (reg:SI 95)
	     (zero_extend:SI (subreg:QI (reg:SI 91) 0)))

	(set (reg:SI 96)
	     (ior:SI (reg:SI 94)
		     (reg:SI 95)))

     In this case, we know that because (reg:SI 94) has
     been anded with 223, there is no need to zero_extend
     (reg:SI 91), and we could eliminate (reg:SI 95).  */

  if (GET_CODE (src1) != GET_CODE (src2))
    return 0;

  /* The SETs in PREV1 and PREV2 do not need to be kept around.  */

  undobuf.num_undo = 0;
  undobuf.storage = 0;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */
  subst_insn = insn;
  n_occurrences = 0;	/* `subst' counts here */

  switch (GET_CODE (src1))
    {
      /* case XOR:  Does not distribute through anything!  */
    case LSHIFTRT:
    case ASHIFTRT:
      /* Right-shift can't distribute through addition
	 since the round-off would happen differently.  */
    case AND:
    case IOR:
      /* Boolean ops don't distribute through addition.  */
      if (code == PLUS)
	return 0;
      goto do_distrib;

    case LSHIFT:
    case ASHIFT:
      /* Left shifts are multiplication; they distribute through
	 addition.  Also, since they work bitwise, they
	 distribute through boolean operations.  */
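      /* Editor's worked example (illustrative): since a left shift by a
	 constant is a multiplication by a power of 2,
	 (plus (ashift x (const_int 3)) (ashift y (const_int 3)))
	 can become (ashift (plus x y) (const_int 3)), saving one shift;
	 this is just the distributive law (x*8 + y*8) == (x+y)*8.  */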
#ifdef NEGATIVE_SHIFT_COUNTS
      /* Negative count is really a right-shift.  */
      if (NEGATIVE_SHIFT_COUNTS
	  && code == PLUS
	  && !(GET_CODE (XEXP (src1, 1)) == CONST_INT
	       && INTVAL (XEXP (src1, 1)) >= 0))
	return 0;
#endif
      goto do_distrib;

    case MULT:
      /* Multiplication distributes through addition only.  */
      if (code != PLUS)
	return 0;

    do_distrib:
      if (GET_CODE (XEXP (src1, 1)) != CONST_INT
	  || GET_CODE (XEXP (src2, 1)) != CONST_INT
	  || INTVAL (XEXP (src1, 1)) != INTVAL (XEXP (src2, 1)))
	return 0;

      /* Give up if we would move a use of a reg across an alteration.
	 Note this is unnecessarily conservative, since a problem really
	 happens only if this reg is set *between* PREV2 and PREV1.
	 But this test is easier.  */
      if (use_crosses_set_p (XEXP (src2, 0), INSN_CUID (prev2)))
	return 0;

      /* Try changing (+ (* x c) (* y c)) to (* (+ x y) c).  */
      to_prev = gen_rtx (code, GET_MODE (src1),
			 XEXP (src1, 0), XEXP (src2, 0));
      to_insn = gen_rtx (GET_CODE (src1), GET_MODE (src1),
			 SET_DEST (pat1), XEXP (src1, 1));
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* Extension can't distribute through addition;
	 the carries could be changed.  */
      if (code == PLUS)
	return 0;
      {
	rtx inner1 = XEXP (src1, 0), inner2 = XEXP (src2, 0);
	int subreg_needed = 0;

	/* Try changing (& (extend x) (extend y)) to (extend (& x y)).  */
	/* But keep extend insns together with their subregs.  */
	if (GET_CODE (inner1) == SUBREG)
	  {
	    if (SUBREG_WORD (inner1) != 0)
	      return 0;
	    else
	      {
		subreg_needed = 1;
		inner1 = SUBREG_REG (inner1);
	      }
	  }

	if (GET_CODE (inner2) == SUBREG)
	  {
	    if (SUBREG_WORD (inner2) != 0)
	      return 0;
	    else
	      {
		subreg_needed = 1;
		inner2 = SUBREG_REG (inner2);
	      }
	  }

	/* Give up if we would move a use of a reg across an alteration.
	   Note this is unnecessarily conservative, since a problem really
	   happens only if this reg is set *between* PREV2 and PREV1.
	   But this test is easier.  */
	if (use_crosses_set_p (inner2, INSN_CUID (prev2)))
	  return 0;

	to_prev = gen_rtx (code, GET_MODE (src1), inner1, inner2);
	to_insn = gen_rtx (GET_CODE (src1), GET_MODE (src1),
			   subreg_needed
			   ? gen_rtx (SUBREG, GET_MODE (XEXP (src1, 0)),
				      SET_DEST (pat1), 0)
			   : SET_DEST (pat1));
      }
      break;

    default:
      return 0;
    }

  /* Are the results of this "substitution" a valid instruction?  */

  new_insn_pat = subst (PATTERN (insn), SET_SRC (PATTERN (insn)), to_insn);
  distrib_merges_1++;

  insn_code_number = recog (new_insn_pat, insn);
  if (insn_code_number < 0)
    {
      undo_all ();
      return 0;
    }

  subst_insn = prev1;
  new_prev_pat = subst (pat1, src1, to_prev);
  distrib_merges_2++;

  prev_code_number = recog (new_prev_pat, prev1);
  if (prev_code_number < 0)
    {
      undo_all ();
      return 0;
    }

  /* Everything worked; install the new patterns.  */
  INSN_CODE (insn) = insn_code_number;
  PATTERN (insn) = new_insn_pat;

  INSN_CODE (prev1) = prev_code_number;
  PATTERN (prev1) = new_prev_pat;

  /* Need to change LOG_LINKS around...  PREV1 now gets
     whatever flowed into PREV2.  PREV2 is going to
     become a NOTE, so we clear out its LOG_LINKS.  */
  remove_links (insn, prev2);
  add_links (prev1, prev2, adjacent_insns_p (prev2, prev1));

  /* Registers which died in PREV2 now die in PREV1.
     Also, registers born in PREV2 dying in INSN now die in PREV1.  */
  move_deaths_2 (src2, INSN_CUID (prev2), insn, prev1);

  regno = REGNO (SET_DEST (pat2));

  reg_n_sets[regno]--;
  if (reg_n_sets[regno] == 0
      && ! (basic_block_live_at_start[0][regno / HOST_BITS_PER_INT]
	    & (1 << (regno % HOST_BITS_PER_INT))))
    reg_n_refs[regno] = 0;
  remove_death (regno, insn);

  PUT_CODE (prev2, NOTE);
  NOTE_LINE_NUMBER (prev2) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (prev2) = 0;

  distrib_successes++;
  return prev1;
}
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
  fprintf
    (file,
     ";; Distributor statistics: %d attempts, %d:%d substitutions,\n;; %d successes.\n\n",
     distrib_attempts, distrib_merges_1,
     distrib_merges_2, distrib_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
  fprintf
    (file,
     "\n;; Distributor totals: %d attempts, %d:%d substitutions,\n;; %d successes.\n",
     total_distrib_attempts, total_distrib_merges_1,
     total_distrib_merges_2, total_distrib_successes);
}