This commit was manufactured by cvs2svn to create tag 'FreeBSD-release/1.0'.
[unix-history] / gnu / usr.bin / cc / lib / expr.c
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))  /* rounds up; e.g. CEIL (10, 4) == 3 */

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED  /* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;                /* destination block (a MEM) */
  rtx to_addr;           /* address of TO, possibly an autoincrement */
  int autinc_to;         /* nonzero if TO_ADDR is a PRE/POST INC/DEC */
  int explicit_inc_to;   /* +1/-1 if we adjust TO_ADDR with explicit adds */
  rtx from;              /* source block (a MEM) */
  rtx from_addr;         /* address of FROM */
  int autinc_from;       /* nonzero if FROM_ADDR is a PRE/POST INC/DEC */
  int explicit_inc_from; /* +1/-1 if we adjust FROM_ADDR with explicit adds */
  int len;               /* bytes remaining to move */
  int offset;            /* current byte offset into the blocks */
  int reverse;           /* nonzero to move from high addresses to low */
};

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
                                    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
                              enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx (REG, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */    /* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
                      && direct_load[(int) mode]
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
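
/* Illustrative sketch (hypothetical, compiled out): widening a QImode
   value to SImode.  UNSIGNEDP == 1 requests zero-extension; 0 would
   request sign-extension.  convert_to_mode either reuses X in place
   via gen_lowpart or emits a real conversion through convert_move.  */
#if 0
static rtx
example_widen_qi_to_si (x)
     rtx x;        /* assumed to have mode QImode */
{
  return convert_to_mode (SImode, x, 1);
}
#endif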
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#endif
    }
}
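
/* Illustrative sketch (hypothetical, compiled out): copying 16 bytes
   between two BLKmode MEMs assumed to be word-aligned.  With a small
   constant size like this, emit_block_move expands into move_by_pieces;
   larger or variable sizes fall through to a movstr pattern or to the
   memcpy/bcopy library call.  */
#if 0
static void
example_block_copy (dst, src)
     rtx dst, src;        /* BLKmode MEMs */
{
  emit_block_move (dst, src, GEN_INT (16), UNITS_PER_WORD);
}
#endif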
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
                                   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
                                    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
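
/* Illustrative sketch (hypothetical, compiled out): zeroing storage.
   A BLKmode object goes through the memset/bzero library call above;
   an object with a scalar mode is simply assigned const0_rtx.  */
#if 0
static void
example_clear_64_bytes (obj)
     rtx obj;        /* BLKmode MEM, 64 bytes long */
{
  clear_storage (obj, 64);
}
#endif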

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
                             (class == MODE_COMPLEX_INT
                              ? MODE_INT : MODE_FLOAT),
                             0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && submode != BLKmode
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
        emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                 ((stack ? change_address (x, submode, (rtx) 0)
                   : gen_highpart (submode, x)),
                  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                 ((stack ? change_address (x, submode, (rtx) 0)
                   : gen_lowpart (submode, x)),
                  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
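
/* Illustrative sketch (hypothetical, compiled out): pushing one word.
   gen_push_operand builds the (PRE_DEC sp) or (PRE_INC sp) address
   selected by STACK_PUSH_CODE, so a plain move through that address
   performs the push.  */
#if 0
static void
example_push_word (x)
     rtx x;        /* a word_mode value */
{
  emit_move_insn (gen_rtx (MEM, word_mode, gen_push_operand ()), x);
}
#endif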

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                      xinner, size, GEN_INT (align));
              if (pat != 0)
                {
                  emit_insn (pat);
                  goto ret;
                }
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                      xinner, size, GEN_INT (align));
              if (pat != 0)
                {
                  emit_insn (pat);
                  goto ret;
                }
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                      xinner, size, GEN_INT (align));
              if (pat != 0)
                {
                  emit_insn (pat);
                  goto ret;
                }
            }
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            {
              rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                      xinner, size, GEN_INT (align));
              if (pat != 0)
                {
                  emit_insn (pat);
                  goto ret;
                }
            }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 0,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#else
          emit_library_call (bcopy_libfunc, 0,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
2020 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2021 int args_offset = INTVAL (args_so_far);
2022 int skip;
2023
2024 /* Push padding now if padding above and stack grows down,
2025 or if padding below and stack grows up.
2026 But if space already allocated, this has already been done. */
2027 if (extra && args_addr == 0
2028 && where_pad != none && where_pad != stack_direction)
2029 anti_adjust_stack (GEN_INT (extra));
2030
2031 /* If we make space by pushing it, we might as well push
2032 the real data. Otherwise, we can leave OFFSET nonzero
2033 and leave the space uninitialized. */
2034 if (args_addr == 0)
2035 offset = 0;
2036
2037 /* Now NOT_STACK gets the number of words that we don't need to
2038 allocate on the stack. */
2039 not_stack = partial - offset;
2040
2041 /* If the partial register-part of the arg counts in its stack size,
2042 skip the part of stack space corresponding to the registers.
2043 Otherwise, start copying to the beginning of the stack space,
2044 by setting SKIP to 0. */
2045#ifndef REG_PARM_STACK_SPACE
2046 skip = 0;
2047#else
2048 skip = not_stack;
2049#endif
2050
2051 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2052 x = validize_mem (force_const_mem (mode, x));
2053
2054 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2055 SUBREGs of such registers are not allowed. */
2056 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2057 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2058 x = copy_to_reg (x);
2059
2060 /* Loop over all the words allocated on the stack for this arg. */
2061 /* We can do it by words, because any scalar bigger than a word
2062 has a size a multiple of a word. */
2063#ifndef PUSH_ARGS_REVERSED
2064 for (i = not_stack; i < size; i++)
2065#else
2066 for (i = size - 1; i >= not_stack; i--)
2067#endif
2068 if (i >= not_stack + offset)
2069 emit_push_insn (operand_subword_force (x, i, mode),
2070 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2071 0, args_addr,
2072 GEN_INT (args_offset + ((i - not_stack + skip)
2073 * UNITS_PER_WORD)));
2074 }
2075 else
2076 {
2077 rtx addr;
2078
2079 /* Push padding now if padding above and stack grows down,
2080 or if padding below and stack grows up.
2081 But if space already allocated, this has already been done. */
2082 if (extra && args_addr == 0
2083 && where_pad != none && where_pad != stack_direction)
2084 anti_adjust_stack (GEN_INT (extra));
2085
2086#ifdef PUSH_ROUNDING
2087 if (args_addr == 0)
2088 addr = gen_push_operand ();
2089 else
2090#endif
2091 if (GET_CODE (args_so_far) == CONST_INT)
2092 addr
2093 = memory_address (mode,
2094 plus_constant (args_addr, INTVAL (args_so_far)));
2095 else
2096 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2097 args_so_far));
2098
2099 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2100 }
2101
2102 ret:
2103 /* If part should go in registers, copy that part
2104 into the appropriate registers. Do this now, at the end,
2105 since mem-to-mem copies above may do function calls. */
2106 if (partial > 0 && reg != 0)
2107 move_block_to_reg (REGNO (reg), x, partial, mode);
2108
2109 if (extra && args_addr == 0 && where_pad == stack_direction)
2110 anti_adjust_stack (GEN_INT (extra));
2111}
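
/* An illustrative sketch of the partial-register case above (commentary
   only, not part of the algorithm): on a hypothetical 32-bit target
   with UNITS_PER_WORD == 4, a DImode argument passed with PARTIAL == 1
   has SIZE == 2 words; word 0 travels in REG, so the loop pushes only
   word 1, roughly

	(set (mem:SI (pre_dec:SI (reg:SI sp)))
	     (subreg:SI (reg:DI n) 1))

   and move_block_to_reg at `ret' then loads word 0 into REG.  */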
\f
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
					    force_reg (Pmode, offset_rtx)));
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    MEM_VOLATILE_P (to_rtx) = 1;
#if 0 /* This was turned off because, when a field is volatile
	 in an object which is not volatile, the object may be in a register,
	 and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      /* If we aren't returning a result, just pass on what expand_expr
	 returned; it was probably const0_rtx.  Otherwise, convert RESULT
	 to the proper mode.  */
      return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
					    TREE_UNSIGNED (TREE_TYPE (to)))
	      : result);
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
      rtx size = expr_size (from);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  return result;
}
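
/* Example of the special treatment above (illustrative only): for

     struct { unsigned f : 3; } s;  ...  s.f = n;

   TO is a COMPONENT_REF whose field has no machine mode of its own;
   get_inner_reference yields BITSIZE == 3 and MODE1 == VOIDmode, and
   the assignment is done by store_field (ultimately store_bit_field)
   rather than by a plain move into TO_RTX.  */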

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   Returns TARGET or an equivalent value.
   TARGET may contain a QUEUED rtx.

   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.

   If the value stored is a constant, we return the constant.  */

rtx
store_expr (exp, target, suggest_reg)
     register tree exp;
     register rtx target;
     int suggest_reg;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return target;
    }
  else if (suggest_reg && GET_CODE (target) == MEM
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, SUGGEST_REG will be nonzero for it.
       We know expand_expr will not use the target in that case.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, it is not safe
       to use as the returned value.  It would access the wrong
       place by the time the queued increment gets output.
       So copy the value through a temporary and use that temp
       as the result.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return temp;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* DO return TARGET if it's a specified hardware register.
	 expand_return relies on this.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && CONSTANT_P (temp))
	dont_return_target = 1;
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
	  rtx size;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      size_binop (CEIL_DIV_EXPR,
					  TYPE_SIZE (TREE_TYPE (exp)),
					  size_int (BITS_PER_UNIT)),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET
		 that we have to clear.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  temp = plus_constant (XEXP (target, 0),
					TREE_STRING_LENGTH (exp));
		  size = plus_constant (size,
					- TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  enum machine_mode size_mode = Pmode;

		  temp = force_reg (Pmode, XEXP (target, 0));
		  temp = expand_binop (size_mode, add_optab, temp,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (size_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     temp, Pmode, const0_rtx, Pmode, size, Pmode);
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     temp, Pmode, size, Pmode);
#endif
		}
	      if (label)
		emit_label (label);
	    }
	}
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }
  if (dont_return_target)
    return temp;
  return target;
}
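
/* A sketch of the STRING_CST case above (illustrative): for

     char buf[8] = "hi";

   expr_size (EXP) is 8 while TREE_STRING_LENGTH (EXP) is 3, so
   emit_block_move copies the three bytes "hi\0" and the memset/bzero
   call then clears the remaining five bytes of BUF.  */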
\f
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.  */

static void
store_constructor (exp, target)
     tree exp;
     rtx target;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp))
	emit_move_insn (target, const0_rtx);

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */
      else if (list_length (CONSTRUCTOR_ELTS (exp))
	       != list_length (TYPE_FIELDS (type)))
	clear_storage (target, int_size_in_bytes (type));
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
	    /* ??? This case remains to be written.  */
	    abort ();

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
		       /* The alignment of TARGET is
			  at least what its type requires.  */
		       VOIDmode, 0,
		       TYPE_ALIGN (type) / BITS_PER_UNIT,
		       int_size_in_bytes (type));
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */

      if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
	  || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	clear_storage (target, int_size_in_bytes (type));
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));

	  store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
		       /* The alignment of TARGET is
			  at least what its type requires.  */
		       VOIDmode, 0,
		       TYPE_ALIGN (type) / BITS_PER_UNIT,
		       int_size_in_bytes (type));
	}
    }

  else
    abort ();
}
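
/* Example (illustrative): for an automatic aggregate such as

     struct { int a, b, c; } x = { 1, 2 };

   CONSTRUCTOR_ELTS has two entries but TYPE_FIELDS has three, so the
   record case above first clears all of X with clear_storage and then
   uses store_field to store 1 into A and 2 into B.  */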

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size);

      emit_move_insn (target, object);

      return target;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG)
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = change_address (target, mode,
			       plus_constant (addr, (bitpos / BITS_PER_UNIT)));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
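
/* A worked instance of the refetch avoidance in store_field
   (illustrative): for an 8-bit field stored from a 32-bit TEMP,
   WIDTH_MASK is 0xff; an unsigned value is recovered as

	temp & 0xff

   while a signed one is sign-extended by the shift pair

	(temp << 24) >> 24	(arithmetic right shift)

   instead of extracting the bits back out of TARGET.  */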
\f
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  if (TREE_CODE (pos) == PLUS_EXPR)
	    {
	      tree constant, var;
	      if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
		{
		  constant = TREE_OPERAND (pos, 0);
		  var = TREE_OPERAND (pos, 1);
		}
	      else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
		{
		  constant = TREE_OPERAND (pos, 1);
		  var = TREE_OPERAND (pos, 0);
		}
	      else
		abort ();

	      *pbitpos += TREE_INT_CST_LOW (constant);
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, var,
					       size_int (BITS_PER_UNIT)));
	    }
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    *pbitpos += TREE_INT_CST_LOW (pos);
	  else
	    {
	      /* Assume here that the offset is a multiple of a unit.
		 If not, there should be an explicitly added constant.  */
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, pos,
					       size_int (BITS_PER_UNIT)));
	    }
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  if (TYPE_PRECISION (index_type) != POINTER_SIZE)
	    {
	      index = convert (type_for_size (POINTER_SIZE, 0), index);
	      index_type = TREE_TYPE (index);
	    }

	  index = fold (build (MULT_EXPR, index_type, index,
			       TYPE_SIZE (TREE_TYPE (exp))));

	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_INT_CST_HIGH (index) == 0)
	    *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (FLOOR_DIV_EXPR, index,
					     size_int (BITS_PER_UNIT)));
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;
      exp = TREE_OPERAND (exp, 0);
    }

  /* If this was a bit-field, see if there is a mode that allows direct
     access in case EXP is in memory.  */
  if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
    {
      mode = mode_for_size (*pbitsize, MODE_INT, 0);
      if (mode == BLKmode)
	mode = VOIDmode;
    }

  if (integer_zerop (offset))
    offset = 0;

  *pmode = mode;
  *poffset = offset;
#if 0
  /* We aren't finished fixing the callers to really handle nonzero offset.  */
  if (offset != 0)
    abort ();
#endif

  return exp;
}
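
/* Example (illustrative, assuming 4-byte ints and 8-bit units): for

     struct s { int i; char c[4]; } v;  ...  v.c[2] ...

   get_inner_reference returns V with *PBITSIZE == 8, *PBITPOS ==
   32 + 16 == 48 (field C at bit 32, plus index 2 times 8 bits),
   *POFFSET == 0 and *PMODE == QImode.  */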
\f
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
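
/* Example (illustrative): applied to

	(plus:SI (reg:SI 100) (const_int 4))

   force_operand emits an add via expand_binop and returns the pseudo
   holding the sum; applied to a bare REG, MEM or constant it returns
   VALUE unchanged.  */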
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_stack_temp (TYPE_MODE (part_type),
					int_size_in_bytes (part_type), 0);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
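
/* How the two routines above cooperate (a sketch): for an assignment
   LHS = RHS whose type has noncopied parts, save_noncopied_parts
   copies each such field of LHS into a stack temporary, wrapped in an
   RTL_EXPR so the old value can be reinstated if the assignment
   clobbers it, while init_noncopied_parts merely pairs each field
   with the initial value it should receive.  Both results are
   TREE_LISTs consumed by the assignment code in expand_expr.  */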

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0)
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp)))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp))));
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
	      && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return staticp (TREE_OPERAND (exp, 0));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    /* We don't know what this can modify.  */
	    return 0;

	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1));

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }

  /* If we have an rtx, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtx is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
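
/* Example (illustrative): with X the MEM holding a structure being
   constructed, safe_from_p (X, EXP) is 0 when EXP is `*p', since an
   INDIRECT_REF may alias any MEM, but 1 when EXP is `a + b' with A
   and B living in pseudo registers, so expand_expr may then compute
   EXP directly into X.  */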

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its
   natural mode.  TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.  */
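
/* Example of the EXPAND_SUM convention (illustrative): expanding
   `p + 4', with P in pseudo 64, under EXPAND_SUM may simply return

	(plus:SI (reg:SI 64) (const_int 4))

   leaving it to the caller (typically memory_address) to decide
   whether the sum must be copied into a register.  */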

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  int ignore = target == const0_rtx;
  tree context;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  if (ignore) target = 0, original_target = 0;

  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  /* Ensure we reference a volatile object even if value is ignored.  */
  if (ignore && TREE_THIS_VOLATILE (exp)
      && TREE_CODE (exp) != FUNCTION_DECL
      && mode != VOIDmode && mode != BLKmode)
    {
      target = gen_reg_rtx (mode);
      temp = expand_expr (exp, target, VOIDmode, modifier);
      if (temp != target)
	emit_move_insn (target, temp);
      return target;
    }

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
					label_rtx (exp), p->forced_labels);
	    pop_obstacks ();
	  }
	else if (modifier == EXPAND_INITIALIZER)
	  forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
				   label_rtx (exp), forced_labels);
	temp = gen_rtx (MEM, FUNCTION_MODE,
			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
	if (function != current_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

    case FUNCTION_DECL:
    case VAR_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();
      /* Ensure variable marked as used
	 even if it doesn't go through a parser.  */
      TREE_USED (exp) = 1;
      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  return change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
      if (GET_CODE (DECL_RTL (exp)) == MEM
	  && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));
      if (GET_CODE (DECL_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_SUM
	  && modifier != EXPAND_INITIALIZER)
	{
	  /* DECL_RTL probably contains a constant address.
	     On RISC machines where a constant address isn't valid,
	     make some insns to get that address into a register.  */
	  if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
	      || (flag_force_addr
		  && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
	    return change_address (DECL_RTL (exp), VOIDmode,
				   copy_rtx (XEXP (DECL_RTL (exp), 0)));
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  enum machine_mode decl_mode = DECL_MODE (exp);

	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */

	  PROMOTE_MODE (decl_mode, unsignedp, type);

	  if (decl_mode != GET_MODE (DECL_RTL (exp)))
	    abort ();

	  temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case SAVE_EXPR:
      context = decl_function_context (exp);
      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == BLKmode)
	    temp
	      = assign_stack_temp (mode,
				   int_size_in_bytes (TREE_TYPE (exp)), 0);
	  else
	    {
	      enum machine_mode var_mode = mode;

	      if (TREE_CODE (type) == INTEGER_TYPE
		  || TREE_CODE (type) == ENUMERAL_TYPE
		  || TREE_CODE (type) == BOOLEAN_TYPE
		  || TREE_CODE (type) == CHAR_TYPE
		  || TREE_CODE (type) == REAL_TYPE
		  || TREE_CODE (type) == POINTER_TYPE
		  || TREE_CODE (type) == OFFSET_TYPE)
		{
		  PROMOTE_MODE (var_mode, unsignedp, type);
		}

	      temp = gen_reg_rtx (var_mode);
	    }

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
				      save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  store_expr (TREE_OPERAND (exp, 0), temp, 0);
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  Note
	 that `unsignedp' was modified above in this case.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);

    case EXIT_EXPR:
      /* Exit the current loop if the body-expression is true.  */
      {
	rtx label = gen_label_rtx ();
	do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
	expand_exit_loop (NULL_PTR);
	emit_label (label);
      }
      return const0_rtx;

    case LOOP_EXPR:
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	abort ();
      emit_insns (RTL_EXPR_SEQUENCE (exp));
      RTL_EXPR_SEQUENCE (exp) = const0_rtx;
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  */
      if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && !memory_address_p (GET_MODE (constructor),
				    XEXP (constructor, 0)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
	  return const0_rtx;
	}
      else
	{
	  if (target == 0 || ! safe_from_p (target, exp))
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (mode);
	      else
		{
		  enum tree_code c = TREE_CODE (type);
		  target
		    = assign_stack_temp (mode, int_size_in_bytes (type), 0);
		  if (c == RECORD_TYPE || c == UNION_TYPE
		      || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
		    MEM_IN_STRUCT_P (target) = 1;
		}
	    }
	  store_constructor (exp, target);
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;

3648	 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3649 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3650 This code has the same general effect as simply doing
3651 expand_expr on the save expr, except that the expression PTR
3652 is computed for use as a memory address. This means different
3653 code, suitable for indexing, may be generated. */
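	 /* E.g. for *p += 1, P is wrapped in a SAVE_EXPR; expanding it
	    here with EXPAND_SUM yields an address form suitable for
	    indexing that both uses of P then share.  */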
3654 if (TREE_CODE (exp1) == SAVE_EXPR
3655 && SAVE_EXPR_RTL (exp1) == 0
3656 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3657 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3658 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3659 {
3660 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3661 VOIDmode, EXPAND_SUM);
3662 op0 = memory_address (mode, temp);
3663 op0 = copy_all_regs (op0);
3664 SAVE_EXPR_RTL (exp1) = op0;
3665 }
3666 else
3667 {
3668 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3669 op0 = memory_address (mode, op0);
3670 }
3671
3672 temp = gen_rtx (MEM, mode, op0);
3673 /* If address was computed by addition,
3674 mark this as an element of an aggregate. */
3675 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3676 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3677 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3678 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3679 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3680 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3681 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3682 || (TREE_CODE (exp1) == ADDR_EXPR
3683 && (exp2 = TREE_OPERAND (exp1, 0))
3684 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3685 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3686 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3687 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3688 MEM_IN_STRUCT_P (temp) = 1;
3689 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3690#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3691 a location is accessed through a pointer to const does not mean
3692 that the value there can never change. */
3693 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3694#endif
3695 return temp;
3696 }
3697
3698 case ARRAY_REF:
3699 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3700 abort ();
3701
3702 {
3703 tree array = TREE_OPERAND (exp, 0);
3704 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3705 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3706 tree index = TREE_OPERAND (exp, 1);
3707 tree index_type = TREE_TYPE (index);
3708 int i;
3709
3710 /* Optimize the special-case of a zero lower bound. */
3711	 /* Optimize the special case of a zero lower bound. */
3712 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3713
3714 if (TREE_CODE (index) != INTEGER_CST
3715 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3716 {
3717 /* Nonconstant array index or nonconstant element size.
3718 Generate the tree for *(&array+index) and expand that,
3719 except do it in a language-independent way
3720 and don't complain about non-lvalue arrays.
3721 `mark_addressable' should already have been called
3722 for any array for which this case will be reached. */
3723
3724 /* Don't forget the const or volatile flag from the array
3725 element. */
3726 tree variant_type = build_type_variant (type,
3727 TREE_READONLY (exp),
3728 TREE_THIS_VOLATILE (exp));
3729 tree array_adr = build1 (ADDR_EXPR,
3730 build_pointer_type (variant_type), array);
3731 tree elt;
3732
3733 /* Convert the integer argument to a type the same size as a
3734 pointer so the multiply won't overflow spuriously. */
3735 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3736 index = convert (type_for_size (POINTER_SIZE, 0), index);
3737
3738 /* Don't think the address has side effects
3739 just because the array does.
3740 (In some cases the address might have side effects,
3741 and we fail to record that fact here. However, it should not
3742 matter, since expand_expr should not care.) */
3743 TREE_SIDE_EFFECTS (array_adr) = 0;
3744
3745 elt = build1 (INDIRECT_REF, type,
3746 fold (build (PLUS_EXPR,
3747 TYPE_POINTER_TO (variant_type),
3748 array_adr,
3749 fold (build (MULT_EXPR,
3750 TYPE_POINTER_TO (variant_type),
3751 index,
3752 size_in_bytes (type))))));
3753
3754 /* Volatility, etc., of new expression is same as old
3755 expression. */
3756 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3757 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3758 TREE_READONLY (elt) = TREE_READONLY (exp);
3759
3760 return expand_expr (elt, target, tmode, modifier);
3761 }
3762
3763 /* Fold an expression like: "foo"[2].
3764 This is not done in fold so it won't happen inside &. */
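	 /* E.g. "foo"[2] is folded here to the integer constant 'o'
	    instead of emitting a load from the string constant.  */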
3765
3766 if (TREE_CODE (array) == STRING_CST
3767 && TREE_CODE (index) == INTEGER_CST
3768 && !TREE_INT_CST_HIGH (index)
3769 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3770 {
3771 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3772 {
3773 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3774 TREE_TYPE (exp) = integer_type_node;
3775 return expand_expr (exp, target, tmode, modifier);
3776 }
3777 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3778 {
3779 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3780 TREE_TYPE (exp) = integer_type_node;
3781 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3782 exp),
3783 target, tmode, modifier);
3784 }
3785 }
3786
3787 /* If this is a constant index into a constant array,
3788	 just get the value from the array. Handle both cases: when
3789	 we have an explicit constructor and when our operand is a variable
3790 that was declared const. */
3791
3792 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3793 {
3794 if (TREE_CODE (index) == INTEGER_CST
3795 && TREE_INT_CST_HIGH (index) == 0)
3796 {
3797 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3798
3799 i = TREE_INT_CST_LOW (index);
3800 while (elem && i--)
3801 elem = TREE_CHAIN (elem);
3802 if (elem)
3803 return expand_expr (fold (TREE_VALUE (elem)), target,
3804 tmode, modifier);
3805 }
3806 }
3807
3808 else if (optimize >= 1
3809 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3810 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3811 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3812 {
3813 if (TREE_CODE (index) == INTEGER_CST
3814 && TREE_INT_CST_HIGH (index) == 0)
3815 {
3816 tree init = DECL_INITIAL (array);
3817
3818 i = TREE_INT_CST_LOW (index);
3819 if (TREE_CODE (init) == CONSTRUCTOR)
3820 {
3821 tree elem = CONSTRUCTOR_ELTS (init);
3822
3823 while (elem && i--)
3824 elem = TREE_CHAIN (elem);
3825 if (elem)
3826 return expand_expr (fold (TREE_VALUE (elem)), target,
3827 tmode, modifier);
3828 }
3829 else if (TREE_CODE (init) == STRING_CST
3830 && i < TREE_STRING_LENGTH (init))
3831 {
3832 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3833 return convert_to_mode (mode, temp, 0);
3834 }
3835 }
3836 }
3837 }
3838
3839 /* Treat array-ref with constant index as a component-ref. */
3840
3841 case COMPONENT_REF:
3842 case BIT_FIELD_REF:
3843 /* If the operand is a CONSTRUCTOR, we can just extract the
3844 appropriate field if it is present. */
3845 if (code != ARRAY_REF
3846 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3847 {
3848 tree elt;
3849
3850 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3851 elt = TREE_CHAIN (elt))
3852 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3853 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3854 }
3855
3856 {
3857 enum machine_mode mode1;
3858 int bitsize;
3859 int bitpos;
3860 tree offset;
3861 int volatilep = 0;
3862 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3863 &mode1, &unsignedp, &volatilep);
3864
3865 /* If we got back the original object, something is wrong. Perhaps
3866 we are evaluating an expression too early. In any event, don't
3867 infinitely recurse. */
3868 if (tem == exp)
3869 abort ();
3870
3871 /* In some cases, we will be offsetting OP0's address by a constant.
3872 So get it as a sum, if possible. If we will be using it
3873 directly in an insn, we validate it. */
3874 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3875
3876 /* If this is a constant, put it into a register if it is a
3877 legitimate constant and memory if it isn't. */
3878 if (CONSTANT_P (op0))
3879 {
3880 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3881 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3882 op0 = force_reg (mode, op0);
3883 else
3884 op0 = validize_mem (force_const_mem (mode, op0));
3885 }
3886
3887 if (offset != 0)
3888 {
3889 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3890
3891 if (GET_CODE (op0) != MEM)
3892 abort ();
3893 op0 = change_address (op0, VOIDmode,
3894 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3895 force_reg (Pmode, offset_rtx)));
3896 }
3897
3898 /* Don't forget about volatility even if this is a bitfield. */
3899 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3900 {
3901 op0 = copy_rtx (op0);
3902 MEM_VOLATILE_P (op0) = 1;
3903 }
3904
3905 if (mode1 == VOIDmode
3906 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3907 && modifier != EXPAND_CONST_ADDRESS
3908 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3909 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3910 {
3911 /* In cases where an aligned union has an unaligned object
3912 as a field, we might be extracting a BLKmode value from
3913 an integer-mode (e.g., SImode) object. Handle this case
3914 by doing the extract into an object as wide as the field
3915 (which we know to be the width of a basic mode), then
3916 storing into memory, and changing the mode to BLKmode. */
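	     /* E.g. extracting a BLKmode field that lives inside an
		SImode union member: do the extract in SImode, spill it
		to a stack temp, then relabel the temp as BLKmode
		(a sketch of the case described above).  */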
3917 enum machine_mode ext_mode = mode;
3918
3919 if (ext_mode == BLKmode)
3920 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3921
3922 if (ext_mode == BLKmode)
3923 abort ();
3924
3925 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3926 unsignedp, target, ext_mode, ext_mode,
3927 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3928 int_size_in_bytes (TREE_TYPE (tem)));
3929 if (mode == BLKmode)
3930 {
3931 rtx new = assign_stack_temp (ext_mode,
3932 bitsize / BITS_PER_UNIT, 0);
3933
3934 emit_move_insn (new, op0);
3935 op0 = copy_rtx (new);
3936 PUT_MODE (op0, BLKmode);
3937 }
3938
3939 return op0;
3940 }
3941
3942 /* Get a reference to just this component. */
3943 if (modifier == EXPAND_CONST_ADDRESS
3944 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3945 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3946 (bitpos / BITS_PER_UNIT)));
3947 else
3948 op0 = change_address (op0, mode1,
3949 plus_constant (XEXP (op0, 0),
3950 (bitpos / BITS_PER_UNIT)));
3951 MEM_IN_STRUCT_P (op0) = 1;
3952 MEM_VOLATILE_P (op0) |= volatilep;
3953 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3954 return op0;
3955 if (target == 0)
3956 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3957 convert_move (target, op0, unsignedp);
3958 return target;
3959 }
3960
3961 case OFFSET_REF:
3962 {
3963 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
3964 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
3965 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3966 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3967 MEM_IN_STRUCT_P (temp) = 1;
3968 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3969#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3970 a location is accessed through a pointer to const does not mean
3971 that the value there can never change. */
3972 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3973#endif
3974 return temp;
3975 }
3976
3977 /* Intended for a reference to a buffer of a file-object in Pascal.
3978 But it's not certain that a special tree code will really be
3979 necessary for these. INDIRECT_REF might work for them. */
3980 case BUFFER_REF:
3981 abort ();
3982
3983 /* IN_EXPR: Inlined pascal set IN expression.
3984
3985 Algorithm:
3986 rlo = set_low - (set_low%bits_per_word);
3987 the_word = set [ (index - rlo)/bits_per_word ];
3988 bit_index = index % bits_per_word;
3989 bitmask = 1 << bit_index;
3990 return !!(the_word & bitmask); */
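      /* Worked example (a sketch assuming bits_per_word == 8): for a
	 set of ['a'..'z'] and index 'd' (100), rlo = 97 - 97%8 = 96,
	 the_word = set[(100-96)/8] = set[0], bit_index = 100%8 = 4,
	 so the result is !!(set[0] & (1 << 4)).  */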
3991 case IN_EXPR:
3992 preexpand_calls (exp);
3993 {
3994 tree set = TREE_OPERAND (exp, 0);
3995 tree index = TREE_OPERAND (exp, 1);
3996 tree set_type = TREE_TYPE (set);
3997
3998 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3999 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4000
4001 rtx index_val;
4002 rtx lo_r;
4003 rtx hi_r;
4004 rtx rlow;
4005 rtx diff, quo, rem, addr, bit, result;
4006 rtx setval, setaddr;
4007 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4008
4009 if (target == 0)
4010 target = gen_reg_rtx (mode);
4011
4012 /* If domain is empty, answer is no. */
4013 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4014 return const0_rtx;
4015
4016 index_val = expand_expr (index, 0, VOIDmode, 0);
4017 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4018 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4019 setval = expand_expr (set, 0, VOIDmode, 0);
4020 setaddr = XEXP (setval, 0);
4021
4022 /* Compare index against bounds, if they are constant. */
4023 if (GET_CODE (index_val) == CONST_INT
4024 && GET_CODE (lo_r) == CONST_INT
4025 && INTVAL (index_val) < INTVAL (lo_r))
4026 return const0_rtx;
4027
4028 if (GET_CODE (index_val) == CONST_INT
4029 && GET_CODE (hi_r) == CONST_INT
4030 && INTVAL (hi_r) < INTVAL (index_val))
4031 return const0_rtx;
4032
4033 /* If we get here, we have to generate the code for both cases
4034 (in range and out of range). */
4035
4036 op0 = gen_label_rtx ();
4037 op1 = gen_label_rtx ();
4038
4039 if (! (GET_CODE (index_val) == CONST_INT
4040 && GET_CODE (lo_r) == CONST_INT))
4041 {
4042 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4043 GET_MODE (index_val), 0, 0);
4044 emit_jump_insn (gen_blt (op1));
4045 }
4046
4047 if (! (GET_CODE (index_val) == CONST_INT
4048 && GET_CODE (hi_r) == CONST_INT))
4049 {
4050 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4051 GET_MODE (index_val), 0, 0);
4052 emit_jump_insn (gen_bgt (op1));
4053 }
4054
4055 /* Calculate the element number of bit zero in the first word
4056 of the set. */
4057 if (GET_CODE (lo_r) == CONST_INT)
4058 rlow = GEN_INT (INTVAL (lo_r)
4059 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4060 else
4061 rlow = expand_binop (index_mode, and_optab, lo_r,
4062 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4063 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4064
4065 diff = expand_binop (index_mode, sub_optab,
4066 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4067
4068 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4069 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4070 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4071 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4072 addr = memory_address (byte_mode,
4073 expand_binop (index_mode, add_optab,
4074 diff, setaddr, NULL_RTX, 0,
4075 OPTAB_LIB_WIDEN));
4076	 /* Extract the bit we want to examine. */
4077 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4078 gen_rtx (MEM, byte_mode, addr),
4079 make_tree (TREE_TYPE (index), rem),
4080 NULL_RTX, 1);
4081 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4082 GET_MODE (target) == byte_mode ? target : 0,
4083 1, OPTAB_LIB_WIDEN);
4084
4085 if (result != target)
4086 convert_move (target, result, 1);
4087
4088 /* Output the code to handle the out-of-range case. */
4089 emit_jump (op0);
4090 emit_label (op1);
4091 emit_move_insn (target, const0_rtx);
4092 emit_label (op0);
4093 return target;
4094 }
4095
4096 case WITH_CLEANUP_EXPR:
4097 if (RTL_EXPR_RTL (exp) == 0)
4098 {
4099 RTL_EXPR_RTL (exp)
4100 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4101 cleanups_this_call
4102 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4103 /* That's it for this cleanup. */
4104 TREE_OPERAND (exp, 2) = 0;
4105 }
4106 return RTL_EXPR_RTL (exp);
4107
4108 case CALL_EXPR:
4109 /* Check for a built-in function. */
4110 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4111 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4112 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4113 return expand_builtin (exp, target, subtarget, tmode, ignore);
4114 /* If this call was expanded already by preexpand_calls,
4115 just return the result we got. */
4116 if (CALL_EXPR_RTL (exp) != 0)
4117 return CALL_EXPR_RTL (exp);
4118 return expand_call (exp, target, ignore);
4119
4120 case NON_LVALUE_EXPR:
4121 case NOP_EXPR:
4122 case CONVERT_EXPR:
4123 case REFERENCE_EXPR:
4124 if (TREE_CODE (type) == VOID_TYPE || ignore)
4125 {
4126 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4127 return const0_rtx;
4128 }
4129 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4130 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4131 if (TREE_CODE (type) == UNION_TYPE)
4132 {
4133 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4134 if (target == 0)
4135 {
4136 if (mode == BLKmode)
4137 {
4138 if (TYPE_SIZE (type) == 0
4139 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4140 abort ();
4141 target = assign_stack_temp (BLKmode,
4142 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4143 + BITS_PER_UNIT - 1)
4144 / BITS_PER_UNIT, 0);
4145 }
4146 else
4147 target = gen_reg_rtx (mode);
4148 }
4149 if (GET_CODE (target) == MEM)
4150 /* Store data into beginning of memory target. */
4151 store_expr (TREE_OPERAND (exp, 0),
4152 change_address (target, TYPE_MODE (valtype), 0), 0);
4153
4154 else if (GET_CODE (target) == REG)
4155 /* Store this field into a union of the proper type. */
4156 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4157 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4158 VOIDmode, 0, 1,
4159 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4160 else
4161 abort ();
4162
4163 /* Return the entire union. */
4164 return target;
4165 }
4166 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4167 if (GET_MODE (op0) == mode)
4168 return op0;
4169 /* If arg is a constant integer being extended from a narrower mode,
4170 we must really truncate to get the extended bits right. Otherwise
4171 (unsigned long) (unsigned char) ("\377"[0])
4172 would come out as ffffffff. */
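      /* E.g. in the case above, width is 8 and OP0 arrives as -1;
	 since unsigned char is unsigned, VAL is masked with
	 (1 << 8) - 1, giving 255 rather than a sign-extended -1.  */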
4173 if (GET_MODE (op0) == VOIDmode
4174 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4175 < GET_MODE_BITSIZE (mode)))
4176 {
4177	 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4178 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4179
4180 if (width < HOST_BITS_PER_WIDE_INT)
4181 {
4182 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4183 : CONST_DOUBLE_LOW (op0));
4184 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4185 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4186 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4187 else
4188 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4189
4190 op0 = GEN_INT (val);
4191 }
4192 else
4193 {
4194 op0 = (simplify_unary_operation
4195 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4196 ? ZERO_EXTEND : SIGN_EXTEND),
4197 mode, op0,
4198 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4199 if (op0 == 0)
4200 abort ();
4201 }
4202 }
4203 if (GET_MODE (op0) == VOIDmode)
4204 return op0;
4205 if (modifier == EXPAND_INITIALIZER)
4206 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4207 if (flag_force_mem && GET_CODE (op0) == MEM)
4208 op0 = copy_to_reg (op0);
4209
4210 if (target == 0)
4211 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4212 else
4213 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4214 return target;
4215
4216 case PLUS_EXPR:
4217 /* We come here from MINUS_EXPR when the second operand is a constant. */
4218 plus_expr:
4219 this_optab = add_optab;
4220
4221 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4222 something else, make sure we add the register to the constant and
4223 then to the other thing. This case can occur during strength
4224 reduction and doing it this way will produce better code if the
4225 frame pointer or argument pointer is eliminated.
4226
4227 fold-const.c will ensure that the constant is always in the inner
4228 PLUS_EXPR, so the only case we need to do anything about is if
4229 sp, ap, or fp is our second argument, in which case we must swap
4230 the innermost first argument and our second argument. */
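      /* E.g. (x + 4) + FP is rearranged into (FP + 4) + x, so that
	 plus_constant below can fold FP + 4 once the frame pointer is
	 eliminated (FP here stands for the frame-pointer RTL_EXPR).  */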
4231
4232 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4233 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4234 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4235 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4236 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4237 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4238 {
4239 tree t = TREE_OPERAND (exp, 1);
4240
4241 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4242 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4243 }
4244
4245 /* If the result is to be Pmode and we are adding an integer to
4246 something, we might be forming a constant. So try to use
4247 plus_constant. If it produces a sum and we can't accept it,
4248 use force_operand. This allows P = &ARR[const] to generate
4249 efficient code on machines where a SYMBOL_REF is not a valid
4250 address.
4251
4252 If this is an EXPAND_SUM call, always return the sum. */
4253 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4254 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4255 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4256 || mode == Pmode))
4257 {
4258 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4259 EXPAND_SUM);
4260 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4261 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4262 op1 = force_operand (op1, target);
4263 return op1;
4264 }
4265
4266 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4267	   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4268 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4269 || mode == Pmode))
4270 {
4271 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4272 EXPAND_SUM);
4273 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4274 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4275 op0 = force_operand (op0, target);
4276 return op0;
4277 }
4278
4279 /* No sense saving up arithmetic to be done
4280 if it's all in the wrong mode to form part of an address.
4281 And force_operand won't know whether to sign-extend or
4282 zero-extend. */
4283 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4284 || mode != Pmode) goto binop;
4285
4286 preexpand_calls (exp);
4287 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4288 subtarget = 0;
4289
4290 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4291 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4292
4293 /* Make sure any term that's a sum with a constant comes last. */
4294 if (GET_CODE (op0) == PLUS
4295 && CONSTANT_P (XEXP (op0, 1)))
4296 {
4297 temp = op0;
4298 op0 = op1;
4299 op1 = temp;
4300 }
4301 /* If adding to a sum including a constant,
4302 associate it to put the constant outside. */
4303 if (GET_CODE (op1) == PLUS
4304 && CONSTANT_P (XEXP (op1, 1)))
4305 {
4306 rtx constant_term = const0_rtx;
4307
4308 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4309 if (temp != 0)
4310 op0 = temp;
4311 /* Ensure that MULT comes first if there is one. */
4312 else if (GET_CODE (op0) == MULT)
4313 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4314 else
4315 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4316
4317 /* Let's also eliminate constants from op0 if possible. */
4318 op0 = eliminate_constant_term (op0, &constant_term);
4319
4320 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4321 their sum should be a constant. Form it into OP1, since the
4322 result we want will then be OP0 + OP1. */
4323
4324 temp = simplify_binary_operation (PLUS, mode, constant_term,
4325 XEXP (op1, 1));
4326 if (temp != 0)
4327 op1 = temp;
4328 else
4329 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4330 }
4331
4332 /* Put a constant term last and put a multiplication first. */
4333 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4334 temp = op1, op1 = op0, op0 = temp;
4335
4336 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4337 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4338
4339 case MINUS_EXPR:
4340 /* Handle difference of two symbolic constants,
4341 for the sake of an initializer. */
4342 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4343 && really_constant_p (TREE_OPERAND (exp, 0))
4344 && really_constant_p (TREE_OPERAND (exp, 1)))
4345 {
4346 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4347 VOIDmode, modifier);
4348 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4349 VOIDmode, modifier);
4350 return gen_rtx (MINUS, mode, op0, op1);
4351 }
4352 /* Convert A - const to A + (-const). */
4353 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4354 {
4355 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4356 fold (build1 (NEGATE_EXPR, type,
4357 TREE_OPERAND (exp, 1))));
4358 goto plus_expr;
4359 }
4360 this_optab = sub_optab;
4361 goto binop;
4362
4363 case MULT_EXPR:
4364 preexpand_calls (exp);
4365 /* If first operand is constant, swap them.
4366 Thus the following special case checks need only
4367 check the second operand. */
4368 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4369 {
4370 register tree t1 = TREE_OPERAND (exp, 0);
4371 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4372 TREE_OPERAND (exp, 1) = t1;
4373 }
4374
4375 /* Attempt to return something suitable for generating an
4376 indexed address, for machines that support that. */
4377
4378 if (modifier == EXPAND_SUM && mode == Pmode
4379 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4380 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4381 {
4382 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4383
4384 /* Apply distributive law if OP0 is x+c. */
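	  /* E.g. (x + 4) * 3 becomes (x * 3) + 12, a form that fits
	     base-plus-index-plus-displacement addressing on many
	     machines.  */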
4385 if (GET_CODE (op0) == PLUS
4386 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4387 return gen_rtx (PLUS, mode,
4388 gen_rtx (MULT, mode, XEXP (op0, 0),
4389 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4390 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4391 * INTVAL (XEXP (op0, 1))));
4392
4393 if (GET_CODE (op0) != REG)
4394 op0 = force_operand (op0, NULL_RTX);
4395 if (GET_CODE (op0) != REG)
4396 op0 = copy_to_mode_reg (mode, op0);
4397
4398 return gen_rtx (MULT, mode, op0,
4399 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4400 }
4401
4402 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4403 subtarget = 0;
4404
4405 /* Check for multiplying things that have been extended
4406 from a narrower type. If this machine supports multiplying
4407 in that narrower type with a result in the desired type,
4408 do it that way, and avoid the explicit type-conversion. */
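      /* E.g. (int) (short) a * (int) (short) b can use a single
	 HImode-to-SImode widening multiply, when the target provides
	 one, instead of two extensions and a full SImode multiply
	 (illustrative; subject to the optab checks below).  */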
4409 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4410 && TREE_CODE (type) == INTEGER_TYPE
4411 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4412 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4413 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4414 && int_fits_type_p (TREE_OPERAND (exp, 1),
4415 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4416 /* Don't use a widening multiply if a shift will do. */
4417 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4418 > HOST_BITS_PER_WIDE_INT)
4419 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4420 ||
4421 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4422 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4423 ==
4424 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4425 /* If both operands are extended, they must either both
4426 be zero-extended or both be sign-extended. */
4427 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4428 ==
4429 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4430 {
4431 enum machine_mode innermode
4432 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4433 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4434 ? umul_widen_optab : smul_widen_optab);
4435 if (mode == GET_MODE_WIDER_MODE (innermode)
4436 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4437 {
4438 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4439 NULL_RTX, VOIDmode, 0);
4440 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4441 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4442 VOIDmode, 0);
4443 else
4444 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4445 NULL_RTX, VOIDmode, 0);
4446 goto binop2;
4447 }
4448 }
4449 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4450 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4451 return expand_mult (mode, op0, op1, target, unsignedp);
4452
4453 case TRUNC_DIV_EXPR:
4454 case FLOOR_DIV_EXPR:
4455 case CEIL_DIV_EXPR:
4456 case ROUND_DIV_EXPR:
4457 case EXACT_DIV_EXPR:
4458 preexpand_calls (exp);
4459 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4460 subtarget = 0;
4461	 /* Possible optimization: compute the dividend with EXPAND_SUM;
4462	    then, if the divisor is constant, we can optimize the case
4463	    where some terms of the dividend have coeffs divisible by it. */
4464 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4465 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4466 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4467
4468 case RDIV_EXPR:
4469 this_optab = flodiv_optab;
4470 goto binop;
4471
4472 case TRUNC_MOD_EXPR:
4473 case FLOOR_MOD_EXPR:
4474 case CEIL_MOD_EXPR:
4475 case ROUND_MOD_EXPR:
4476 preexpand_calls (exp);
4477 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4478 subtarget = 0;
4479 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4480 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4481 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4482
4483 case FIX_ROUND_EXPR:
4484 case FIX_FLOOR_EXPR:
4485 case FIX_CEIL_EXPR:
4486 abort (); /* Not used for C. */
4487
4488 case FIX_TRUNC_EXPR:
4489 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4490 if (target == 0)
4491 target = gen_reg_rtx (mode);
4492 expand_fix (target, op0, unsignedp);
4493 return target;
4494
4495 case FLOAT_EXPR:
4496 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4497 if (target == 0)
4498 target = gen_reg_rtx (mode);
4499 /* expand_float can't figure out what to do if FROM has VOIDmode.
4500 So give it the correct mode. With -O, cse will optimize this. */
4501 if (GET_MODE (op0) == VOIDmode)
4502 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4503 op0);
4504 expand_float (target, op0,
4505 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4506 return target;
4507
4508 case NEGATE_EXPR:
4509 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4510 temp = expand_unop (mode, neg_optab, op0, target, 0);
4511 if (temp == 0)
4512 abort ();
4513 return temp;
4514
4515 case ABS_EXPR:
4516 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4517
4518 /* Handle complex values specially. */
4519 {
4520 enum machine_mode opmode
4521 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4522
4523 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4524 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4525 return expand_complex_abs (opmode, op0, target, unsignedp);
4526 }
4527
4528 /* Unsigned abs is simply the operand. Testing here means we don't
4529 risk generating incorrect code below. */
4530 if (TREE_UNSIGNED (type))
4531 return op0;
4532
4533 /* First try to do it with a special abs instruction. */
4534 temp = expand_unop (mode, abs_optab, op0, target, 0);
4535 if (temp != 0)
4536 return temp;
4537
4538 /* If this machine has expensive jumps, we can do integer absolute
4539 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4540 where W is the width of MODE. */
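      /* Worked example for W == 32: x = -5 gives s = x >> 31 = -1, and
	 (x ^ s) - s = ~x + 1 = 5; x = 5 gives s = 0 and the value is
	 unchanged.  */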
4541
4542 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4543 {
4544 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4545 size_int (GET_MODE_BITSIZE (mode) - 1),
4546 NULL_RTX, 0);
4547
4548 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4549 OPTAB_LIB_WIDEN);
4550 if (temp != 0)
4551 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4552 OPTAB_LIB_WIDEN);
4553
4554 if (temp != 0)
4555 return temp;
4556 }
4557
4558 /* If that does not win, use conditional jump and negate. */
4559 target = original_target;
4560 temp = gen_label_rtx ();
4561 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4562 || (GET_CODE (target) == REG
4563 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4564 target = gen_reg_rtx (mode);
4565 emit_move_insn (target, op0);
4566 emit_cmp_insn (target,
4567 expand_expr (convert (type, integer_zero_node),
4568 NULL_RTX, VOIDmode, 0),
4569 GE, NULL_RTX, mode, 0, 0);
4570 NO_DEFER_POP;
4571 emit_jump_insn (gen_bge (temp));
4572 op0 = expand_unop (mode, neg_optab, target, target, 0);
4573 if (op0 != target)
4574 emit_move_insn (target, op0);
4575 emit_label (temp);
4576 OK_DEFER_POP;
4577 return target;
4578
4579 case MAX_EXPR:
4580 case MIN_EXPR:
4581 target = original_target;
4582 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4583 || (GET_CODE (target) == REG
4584 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4585 target = gen_reg_rtx (mode);
4586 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4587 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4588
4589 /* First try to do it with a special MIN or MAX instruction.
4590 If that does not win, use a conditional jump to select the proper
4591 value. */
4592 this_optab = (TREE_UNSIGNED (type)
4593 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4594 : (code == MIN_EXPR ? smin_optab : smax_optab));
4595
4596 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4597 OPTAB_WIDEN);
4598 if (temp != 0)
4599 return temp;
4600
4601 if (target != op0)
4602 emit_move_insn (target, op0);
4603 op0 = gen_label_rtx ();
4604 /* If this mode is an integer too wide to compare properly,
4605 compare word by word. Rely on cse to optimize constant cases. */
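      /* E.g. a DImode MIN/MAX on a 32-bit target: no single insn can
	 compare the operands, so the branch is built word by word with
	 do_jump_by_parts_greater_rtx (a sketch; depends on
	 can_compare_p for the target).  */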
4606 if (GET_MODE_CLASS (mode) == MODE_INT
4607 && !can_compare_p (mode))
4608 {
4609 if (code == MAX_EXPR)
4610 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4611 else
4612 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4613 emit_move_insn (target, op1);
4614 }
4615 else
4616 {
4617 if (code == MAX_EXPR)
4618 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4619 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4620 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4621 else
4622 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4623 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4624 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4625 if (temp == const0_rtx)
4626 emit_move_insn (target, op1);
4627 else if (temp != const_true_rtx)
4628 {
4629 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4630 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4631 else
4632 abort ();
4633 emit_move_insn (target, op1);
4634 }
4635 }
4636 emit_label (op0);
4637 return target;
4638
4639/* ??? Can optimize when the operand of this is a bitwise operation,
4640 by using a different bitwise operation. */
4641 case BIT_NOT_EXPR:
4642 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4643 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4644 if (temp == 0)
4645 abort ();
4646 return temp;
4647
4648 case FFS_EXPR:
4649 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4650 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4651 if (temp == 0)
4652 abort ();
4653 return temp;
4654
4655/* ??? Can optimize bitwise operations with one arg constant.
4656 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4657 and (a bitwise1 b) bitwise2 b (etc)
4658 but that is probably not worth while. */
4659
4660/* BIT_AND_EXPR is for bitwise anding.
4661 TRUTH_AND_EXPR is for anding two boolean values
4662 when we want in all cases to compute both of them.
4663 In general it is fastest to do TRUTH_AND_EXPR by
4664 computing both operands as actual zero-or-1 values
4665 and then bitwise anding. In cases where there cannot
4666 be any side effects, better code would be made by
4667 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4668 but the question is how to recognize those cases. */
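/* E.g. f () && g () as TRUTH_ANDIF_EXPR skips G when F yields 0,
   while as TRUTH_AND_EXPR both calls are made and their 0-or-1
   results are bitwise anded, which is what this case emits.  */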
4669
4670 case TRUTH_AND_EXPR:
4671 case BIT_AND_EXPR:
4672 this_optab = and_optab;
4673 goto binop;
4674
4675/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4676 case TRUTH_OR_EXPR:
4677 case BIT_IOR_EXPR:
4678 this_optab = ior_optab;
4679 goto binop;
4680
4681 case TRUTH_XOR_EXPR:
4682 case BIT_XOR_EXPR:
4683 this_optab = xor_optab;
4684 goto binop;
4685
4686 case LSHIFT_EXPR:
4687 case RSHIFT_EXPR:
4688 case LROTATE_EXPR:
4689 case RROTATE_EXPR:
4690 preexpand_calls (exp);
4691 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4692 subtarget = 0;
4693 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4694 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4695 unsignedp);
4696
4697/* Could determine the answer when only additive constants differ.
4698 Also, the addition of one can be handled by changing the condition. */
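/* E.g. x + 1 > y + 1 has the same answer as x > y, and x + 1 > y
   could instead be tested as x >= y; neither rewrite is attempted
   here.  */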
4699 case LT_EXPR:
4700 case LE_EXPR:
4701 case GT_EXPR:
4702 case GE_EXPR:
4703 case EQ_EXPR:
4704 case NE_EXPR:
4705 preexpand_calls (exp);
4706 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4707 if (temp != 0)
4708 return temp;
4709 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4710 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4711 && original_target
4712 && GET_CODE (original_target) == REG
4713 && (GET_MODE (original_target)
4714 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4715 {
4716 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4717 if (temp != original_target)
4718 temp = copy_to_reg (temp);
4719 op1 = gen_label_rtx ();
4720 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4721 GET_MODE (temp), unsignedp, 0);
4722 emit_jump_insn (gen_beq (op1));
4723 emit_move_insn (temp, const1_rtx);
4724 emit_label (op1);
4725 return temp;
4726 }
4727 /* If no set-flag instruction, must generate a conditional
4728 store into a temporary variable. Drop through
4729 and handle this like && and ||. */
4730
4731 case TRUTH_ANDIF_EXPR:
4732 case TRUTH_ORIF_EXPR:
4733 if (target == 0 || ! safe_from_p (target, exp)
4734 /* Make sure we don't have a hard reg (such as function's return
4735 value) live across basic blocks, if not optimizing. */
4736 || (!optimize && GET_CODE (target) == REG
4737 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4738 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4739 emit_clr_insn (target);
4740 op1 = gen_label_rtx ();
4741 jumpifnot (exp, op1);
4742 emit_0_to_1_insn (target);
4743 emit_label (op1);
4744 return target;
4745
4746 case TRUTH_NOT_EXPR:
4747 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4748 /* The parser is careful to generate TRUTH_NOT_EXPR
4749 only with operands that are always zero or one. */
4750 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4751 target, 1, OPTAB_LIB_WIDEN);
4752 if (temp == 0)
4753 abort ();
4754 return temp;
4755
4756 case COMPOUND_EXPR:
4757 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4758 emit_queue ();
4759 return expand_expr (TREE_OPERAND (exp, 1),
4760 (ignore ? const0_rtx : target),
4761 VOIDmode, 0);
4762
4763 case COND_EXPR:
4764 {
4765 /* Note that COND_EXPRs whose type is a structure or union
4766 are required to be constructed to contain assignments of
4767 a temporary variable, so that we can evaluate them here
4768 for side effect only. If type is void, we must do likewise. */
4769
4770 /* If an arm of the branch requires a cleanup,
4771 only that cleanup is performed. */
4772
4773 tree singleton = 0;
4774 tree binary_op = 0, unary_op = 0;
4775 tree old_cleanups = cleanups_this_call;
4776 cleanups_this_call = 0;
4777
4778 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4779 convert it to our mode, if necessary. */
4780 if (integer_onep (TREE_OPERAND (exp, 1))
4781 && integer_zerop (TREE_OPERAND (exp, 2))
4782 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4783 {
4784 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4785 if (GET_MODE (op0) == mode)
4786 return op0;
4787 if (target == 0)
4788 target = gen_reg_rtx (mode);
4789 convert_move (target, op0, unsignedp);
4790 return target;
4791 }
4792
4793 /* If we are not to produce a result, we have no target. Otherwise,
4794 if a target was specified use it; it will not be used as an
4795 intermediate target unless it is safe. If no target, use a
4796 temporary. */
4797
4798 if (mode == VOIDmode || ignore)
4799 temp = 0;
4800 else if (original_target
4801 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4802 temp = original_target;
4803 else if (mode == BLKmode)
4804 {
4805 if (TYPE_SIZE (type) == 0
4806 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4807 abort ();
4808 temp = assign_stack_temp (BLKmode,
4809 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4810 + BITS_PER_UNIT - 1)
4811 / BITS_PER_UNIT, 0);
4812 }
4813 else
4814 temp = gen_reg_rtx (mode);
4815
4816 /* Check for X ? A + B : A. If we have this, we can copy
4817 A to the output and conditionally add B. Similarly for unary
4818 operations. Don't do this if X has side-effects because
4819 those side effects might affect A or B and the "?" operation is
4820 a sequence point in ANSI. (We test for side effects later.) */
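	 /* E.g. for x ? a | 4 : a we can copy A into the target once
	    and conditionally OR in 4, rather than branching between
	    two complete stores (a sketch of the transformation tested
	    for below).  */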
4821
4822 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4823 && operand_equal_p (TREE_OPERAND (exp, 2),
4824 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4825 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4826 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4827 && operand_equal_p (TREE_OPERAND (exp, 1),
4828 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4829 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4830 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4831 && operand_equal_p (TREE_OPERAND (exp, 2),
4832 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4833 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4834 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4835 && operand_equal_p (TREE_OPERAND (exp, 1),
4836 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4837 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4838
4839 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4840 operation, do this as A + (X != 0). Similarly for other simple
4841 binary operators. */
4842 if (singleton && binary_op
4843 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4844 && (TREE_CODE (binary_op) == PLUS_EXPR
4845 || TREE_CODE (binary_op) == MINUS_EXPR
4846 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4847 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4848 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4849 && integer_onep (TREE_OPERAND (binary_op, 1))
4850 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4851 {
4852 rtx result;
4853 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4854 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4855 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4856 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4857 : and_optab);
4858
4859 /* If we had X ? A : A + 1, do this as A + (X == 0).
4860
4861 We have to invert the truth value here and then put it
4862 back later if do_store_flag fails. We cannot simply copy
4863 TREE_OPERAND (exp, 0) to another variable and modify that
4864 because invert_truthvalue can modify the tree pointed to
4865 by its argument. */
4866 if (singleton == TREE_OPERAND (exp, 1))
4867 TREE_OPERAND (exp, 0)
4868 = invert_truthvalue (TREE_OPERAND (exp, 0));
4869
4870 result = do_store_flag (TREE_OPERAND (exp, 0),
4871 (safe_from_p (temp, singleton)
4872 ? temp : NULL_RTX),
4873 mode, BRANCH_COST <= 1);
4874
4875 if (result)
4876 {
4877 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4878 return expand_binop (mode, boptab, op1, result, temp,
4879 unsignedp, OPTAB_LIB_WIDEN);
4880 }
4881 else if (singleton == TREE_OPERAND (exp, 1))
4882 TREE_OPERAND (exp, 0)
4883 = invert_truthvalue (TREE_OPERAND (exp, 0));
4884 }
4885
4886 NO_DEFER_POP;
4887 op0 = gen_label_rtx ();
4888
4889 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4890 {
4891 if (temp != 0)
4892 {
4893 /* If the target conflicts with the other operand of the
4894 binary op, we can't use it. Also, we can't use the target
4895 if it is a hard register, because evaluating the condition
4896 might clobber it. */
4897 if ((binary_op
4898 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4899 || (GET_CODE (temp) == REG
4900 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4901 temp = gen_reg_rtx (mode);
4902 store_expr (singleton, temp, 0);
4903 }
4904 else
4905 expand_expr (singleton,
4906			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4907 if (cleanups_this_call)
4908 {
4909 sorry ("aggregate value in COND_EXPR");
4910 cleanups_this_call = 0;
4911 }
4912 if (singleton == TREE_OPERAND (exp, 1))
4913 jumpif (TREE_OPERAND (exp, 0), op0);
4914 else
4915 jumpifnot (TREE_OPERAND (exp, 0), op0);
4916
4917 if (binary_op && temp == 0)
4918 /* Just touch the other operand. */
4919 expand_expr (TREE_OPERAND (binary_op, 1),
4920 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4921 else if (binary_op)
4922 store_expr (build (TREE_CODE (binary_op), type,
4923 make_tree (type, temp),
4924 TREE_OPERAND (binary_op, 1)),
4925 temp, 0);
4926 else
4927 store_expr (build1 (TREE_CODE (unary_op), type,
4928 make_tree (type, temp)),
4929 temp, 0);
4930 op1 = op0;
4931 }
4932#if 0
4933 /* This is now done in jump.c and is better done there because it
4934 produces shorter register lifetimes. */
4935
4936	 /* Check for both possibilities, either constants or variables
4937	    in registers (but not the same as the target!). If so, we can
4938 save branches by assigning one, branching, and assigning the
4939 other. */
4940 else if (temp && GET_MODE (temp) != BLKmode
4941 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4942 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4943 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4944 && DECL_RTL (TREE_OPERAND (exp, 1))
4945 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4946 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4947 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4948 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4949 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4950 && DECL_RTL (TREE_OPERAND (exp, 2))
4951 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4952 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4953 {
4954 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4955 temp = gen_reg_rtx (mode);
4956 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4957 jumpifnot (TREE_OPERAND (exp, 0), op0);
4958 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4959 op1 = op0;
4960 }
4961#endif
4962 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4963 comparison operator. If we have one of these cases, set the
4964 output to A, branch on A (cse will merge these two references),
4965 then set the output to FOO. */
4966 else if (temp
4967 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4968 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4969 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4970 TREE_OPERAND (exp, 1), 0)
4971 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4972 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4973 {
4974 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4975 temp = gen_reg_rtx (mode);
4976 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4977 jumpif (TREE_OPERAND (exp, 0), op0);
4978 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4979 op1 = op0;
4980 }
4981 else if (temp
4982 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4983 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4984 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4985 TREE_OPERAND (exp, 2), 0)
4986 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4987 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4988 {
4989 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4990 temp = gen_reg_rtx (mode);
4991 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4992 jumpifnot (TREE_OPERAND (exp, 0), op0);
4993 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4994 op1 = op0;
4995 }
4996 else
4997 {
4998 op1 = gen_label_rtx ();
4999 jumpifnot (TREE_OPERAND (exp, 0), op0);
5000 if (temp != 0)
5001 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5002 else
5003 expand_expr (TREE_OPERAND (exp, 1),
5004 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5005 if (cleanups_this_call)
5006 {
5007 sorry ("aggregate value in COND_EXPR");
5008 cleanups_this_call = 0;
5009 }
5010
5011 emit_queue ();
5012 emit_jump_insn (gen_jump (op1));
5013 emit_barrier ();
5014 emit_label (op0);
5015 if (temp != 0)
5016 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5017 else
5018 expand_expr (TREE_OPERAND (exp, 2),
5019 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5020 }
5021
5022 if (cleanups_this_call)
5023 {
5024 sorry ("aggregate value in COND_EXPR");
5025 cleanups_this_call = 0;
5026 }
5027
5028 emit_queue ();
5029 emit_label (op1);
5030 OK_DEFER_POP;
5031 cleanups_this_call = old_cleanups;
5032 return temp;
5033 }
5034
5035 case TARGET_EXPR:
5036 {
5037 /* Something needs to be initialized, but we didn't know
5038 where that thing was when building the tree. For example,
5039	    it could be the return value of a function, or a parameter
5040	    to a function which is laid out on the stack, or a temporary
5041 variable which must be passed by reference.
5042
5043 We guarantee that the expression will either be constructed
5044 or copied into our original target. */
5045
5046 tree slot = TREE_OPERAND (exp, 0);
5047 tree exp1;
5048
5049 if (TREE_CODE (slot) != VAR_DECL)
5050 abort ();
5051
5052 if (target == 0)
5053 {
5054 if (DECL_RTL (slot) != 0)
5055 {
5056 target = DECL_RTL (slot);
5057	      /* If we have already expanded the slot, don't do
5058		 it again. (mrs) */
5059 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5060 return target;
5061 }
5062 else
5063 {
5064 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5065 /* All temp slots at this level must not conflict. */
5066 preserve_temp_slots (target);
5067 DECL_RTL (slot) = target;
5068 }
5069
5070#if 0
5071	 /* I bet this needs to be done, and I bet that it needs to
5072	    be above, inside the else clause. The reason is
5073	    simple: how else is it going to get cleaned up? (mrs)
5074
5075	    The reason this probably did not work before, and was
5076	    commented out, is that it was re-expanding already
5077	    expanded target_exprs (target == 0 and DECL_RTL (slot)
5078	    != 0), also cleaning them up many times as well. :-( */
5079
5080 /* Since SLOT is not known to the called function
5081 to belong to its stack frame, we must build an explicit
5082 cleanup. This case occurs when we must build up a reference
5083 to pass the reference as an argument. In this case,
5084 it is very likely that such a reference need not be
5085 built here. */
5086
5087 if (TREE_OPERAND (exp, 2) == 0)
5088 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5089 if (TREE_OPERAND (exp, 2))
5090 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5091 cleanups_this_call);
5092#endif
5093 }
5094 else
5095 {
5096	 /* This case does occur when expanding a parameter which
5097 needs to be constructed on the stack. The target
5098 is the actual stack address that we want to initialize.
5099 The function we call will perform the cleanup in this case. */
5100
5101 /* If we have already assigned it space, use that space,
5102	    not the target that we were passed in, as our target
5103 parameter is only a hint. */
5104 if (DECL_RTL (slot) != 0)
5105 {
5106 target = DECL_RTL (slot);
5107	      /* If we have already expanded the slot, don't do
5108		 it again. (mrs) */
5109 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5110 return target;
5111 }
5112
5113 DECL_RTL (slot) = target;
5114 }
5115
5116 exp1 = TREE_OPERAND (exp, 1);
5117 /* Mark it as expanded. */
5118 TREE_OPERAND (exp, 1) = NULL_TREE;
5119
5120 return expand_expr (exp1, target, tmode, modifier);
5121 }
5122
5123 case INIT_EXPR:
5124 {
5125 tree lhs = TREE_OPERAND (exp, 0);
5126 tree rhs = TREE_OPERAND (exp, 1);
5127 tree noncopied_parts = 0;
5128 tree lhs_type = TREE_TYPE (lhs);
5129
5130 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5131 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5132 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5133 TYPE_NONCOPIED_PARTS (lhs_type));
5134 while (noncopied_parts != 0)
5135 {
5136 expand_assignment (TREE_VALUE (noncopied_parts),
5137 TREE_PURPOSE (noncopied_parts), 0, 0);
5138 noncopied_parts = TREE_CHAIN (noncopied_parts);
5139 }
5140 return temp;
5141 }
5142
5143 case MODIFY_EXPR:
5144 {
5145 /* If lhs is complex, expand calls in rhs before computing it.
5146 That's so we don't compute a pointer and save it over a call.
5147 If lhs is simple, compute it first so we can give it as a
5148	    target if the rhs is just a call. This avoids an extra temp and copy,
5149	    and prevents a partial subsumption that makes bad code.
5150 Actually we could treat component_ref's of vars like vars. */
5151
5152 tree lhs = TREE_OPERAND (exp, 0);
5153 tree rhs = TREE_OPERAND (exp, 1);
5154 tree noncopied_parts = 0;
5155 tree lhs_type = TREE_TYPE (lhs);
5156
5157 temp = 0;
5158
5159 if (TREE_CODE (lhs) != VAR_DECL
5160 && TREE_CODE (lhs) != RESULT_DECL
5161 && TREE_CODE (lhs) != PARM_DECL)
5162 preexpand_calls (exp);
5163
5164	 /* Check for |= or &= of a bitfield of size 1 into another bitfield
5165 of size 1. In this case, (unless we need the result of the
5166 assignment) we can do this more efficiently with a
5167 test followed by an assignment, if necessary.
5168
5169 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5170 things change so we do, this code should be enhanced to
5171 support it. */
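	 /* E.g. for s.a |= t.b with two 1-bit fields: jump past the
	    store when T.B is zero, otherwise store 1 into S.A; no
	    read-modify-write of S.A is required (a sketch of the case
	    tested below).  */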
5172 if (ignore
5173 && TREE_CODE (lhs) == COMPONENT_REF
5174 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5175 || TREE_CODE (rhs) == BIT_AND_EXPR)
5176 && TREE_OPERAND (rhs, 0) == lhs
5177 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5178 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5179 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5180 {
5181 rtx label = gen_label_rtx ();
5182
5183 do_jump (TREE_OPERAND (rhs, 1),
5184 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5185 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5186 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5187 (TREE_CODE (rhs) == BIT_IOR_EXPR
5188 ? integer_one_node
5189 : integer_zero_node)),
5190 0, 0);
5191 do_pending_stack_adjust ();
5192 emit_label (label);
5193 return const0_rtx;
5194 }
5195
5196 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5197 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5198 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5199 TYPE_NONCOPIED_PARTS (lhs_type));
5200
5201 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5202 while (noncopied_parts != 0)
5203 {
5204 expand_assignment (TREE_PURPOSE (noncopied_parts),
5205 TREE_VALUE (noncopied_parts), 0, 0);
5206 noncopied_parts = TREE_CHAIN (noncopied_parts);
5207 }
5208 return temp;
5209 }
5210
5211 case PREINCREMENT_EXPR:
5212 case PREDECREMENT_EXPR:
5213 return expand_increment (exp, 0);
5214
5215 case POSTINCREMENT_EXPR:
5216 case POSTDECREMENT_EXPR:
5217 /* Faster to treat as pre-increment if result is not used. */
5218 return expand_increment (exp, ! ignore);
5219
5220 case ADDR_EXPR:
5221 /* Are we taking the address of a nested function? */
5222 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5223 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5224 {
5225 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5226 op0 = force_operand (op0, target);
5227 }
5228 else
5229 {
5230 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5231 (modifier == EXPAND_INITIALIZER
5232 ? modifier : EXPAND_CONST_ADDRESS));
5233
5234 /* We would like the object in memory. If it is a constant,
5235 we can have it be statically allocated into memory. For
5236 a non-constant (REG or SUBREG), we need to allocate some
5237 memory and store the value into it. */
5238
5239 if (CONSTANT_P (op0))
5240 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5241 op0);
5242
5243 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5244 {
5245 /* If this object is in a register, it must not
5246 be BLKmode. */
5247 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5248 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5249 rtx memloc
5250 = assign_stack_temp (inner_mode,
5251 int_size_in_bytes (inner_type), 1);
5252
5253 emit_move_insn (memloc, op0);
5254 op0 = memloc;
5255 }
5256
5257 if (GET_CODE (op0) != MEM)
5258 abort ();
5259
5260 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5261 return XEXP (op0, 0);
5262 op0 = force_operand (XEXP (op0, 0), target);
5263 }
5264 if (flag_force_addr && GET_CODE (op0) != REG)
5265 return force_reg (Pmode, op0);
5266 return op0;
5267
5268 case ENTRY_VALUE_EXPR:
5269 abort ();
5270
5271 /* COMPLEX type for Extended Pascal & Fortran */
5272 case COMPLEX_EXPR:
5273 {
5274 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5275
5276 rtx prev;
5277
5278 /* Get the rtx for the operands. */
5279 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5280 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5281
5282 if (! target)
5283 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5284
5285 prev = get_last_insn ();
5286
5287 /* Tell flow that the whole of the destination is being set. */
5288 if (GET_CODE (target) == REG)
5289 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5290
5291 /* Move the real (op0) and imaginary (op1) parts to their location. */
5292 emit_move_insn (gen_realpart (mode, target), op0);
5293 emit_move_insn (gen_imagpart (mode, target), op1);
5294
5295 /* Complex construction should appear as a single unit. */
5296 group_insns (prev);
5297
5298 return target;
5299 }
5300
5301 case REALPART_EXPR:
5302 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5303 return gen_realpart (mode, op0);
5304
5305 case IMAGPART_EXPR:
5306 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5307 return gen_imagpart (mode, op0);
5308
5309 case CONJ_EXPR:
5310 {
5311 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5312 rtx imag_t;
5313 rtx prev;
5314
5315 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5316
5317 if (! target)
5318 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5319
5320 prev = get_last_insn ();
5321
5322 /* Tell flow that the whole of the destination is being set. */
5323 if (GET_CODE (target) == REG)
5324 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5325
5326 /* Store the realpart and the negated imagpart to target. */
5327 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5328
5329 imag_t = gen_imagpart (mode, target);
5330 temp = expand_unop (mode, neg_optab,
5331 gen_imagpart (mode, op0), imag_t, 0);
5332 if (temp != imag_t)
5333 emit_move_insn (imag_t, temp);
5334
5335 /* Conjugate should appear as a single unit. */
5336 group_insns (prev);
5337
5338 return target;
5339 }
5340
5341 case ERROR_MARK:
5342 op0 = CONST0_RTX (tmode);
5343 if (op0 != 0)
5344 return op0;
5345 return const0_rtx;
5346
5347 default:
5348 return (*lang_expand_expr) (exp, target, tmode, modifier);
5349 }
5350
5351 /* Here to do an ordinary binary operator, generating an instruction
5352 from the optab already placed in `this_optab'. */
5353 binop:
5354 preexpand_calls (exp);
5355 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5356 subtarget = 0;
5357 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5358 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5359 binop2:
5360 temp = expand_binop (mode, this_optab, op0, op1, target,
5361 unsignedp, OPTAB_LIB_WIDEN);
5362 if (temp == 0)
5363 abort ();
5364 return temp;
5365}
5366\f
5367/* Return the alignment in bits of EXP, a pointer valued expression.
5368 But don't return more than MAX_ALIGN no matter what.
5369 The alignment returned is, by default, the alignment of the thing that
5370 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5371
5372 Otherwise, look at the expression to see if we can do better, i.e., if the
5373 expression is actually pointing at an object whose alignment is tighter. */
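/* Illustrative example (not from the original source): for the
   expression `(char *) &i + 2', where I is an int with 32-bit
   alignment, the PLUS_EXPR case below reduces MAX_ALIGN to 16 bits
   (a constant offset of 2 bytes preserves only 2-byte alignment),
   so the ADDR_EXPR case then returns MIN (32, 16) = 16.  */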
5374
5375static int
5376get_pointer_alignment (exp, max_align)
5377 tree exp;
5378 unsigned max_align;
5379{
5380 unsigned align, inner;
5381
5382 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5383 return 0;
5384
5385 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5386 align = MIN (align, max_align);
5387
5388 while (1)
5389 {
5390 switch (TREE_CODE (exp))
5391 {
5392 case NOP_EXPR:
5393 case CONVERT_EXPR:
5394 case NON_LVALUE_EXPR:
5395 exp = TREE_OPERAND (exp, 0);
5396 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5397 return align;
5398 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5399 inner = MIN (inner, max_align);
5400 align = MAX (align, inner);
5401 break;
5402
5403 case PLUS_EXPR:
5404 /* If sum of pointer + int, restrict our maximum alignment to that
5405 imposed by the integer. If not, we can't do any better than
5406 ALIGN. */
5407 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5408 return align;
5409
5410 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5411 & (max_align - 1))
5412 != 0)
5413 max_align >>= 1;
5414
5415 exp = TREE_OPERAND (exp, 0);
5416 break;
5417
5418 case ADDR_EXPR:
5419 /* See what we are pointing at and look at its alignment. */
5420 exp = TREE_OPERAND (exp, 0);
5421 if (TREE_CODE (exp) == FUNCTION_DECL)
5422 align = MAX (align, FUNCTION_BOUNDARY);
5423 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5424 align = MAX (align, DECL_ALIGN (exp));
5425#ifdef CONSTANT_ALIGNMENT
5426 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5427 align = CONSTANT_ALIGNMENT (exp, align);
5428#endif
5429 return MIN (align, max_align);
5430
5431 default:
5432 return align;
5433 }
5434 }
5435}
5436\f
5437/* Return the tree node and offset if a given argument corresponds to
5438 a string constant. */
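/* Illustrative example (not from the original source): for the
   argument `"abcd" + 3' this returns the STRING_CST for "abcd" and
   sets *PTR_OFFSET to the constant 3; for plain `"abcd"' the offset
   is integer_zero_node.  */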
5439
5440static tree
5441string_constant (arg, ptr_offset)
5442 tree arg;
5443 tree *ptr_offset;
5444{
5445 STRIP_NOPS (arg);
5446
5447 if (TREE_CODE (arg) == ADDR_EXPR
5448 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5449 {
5450 *ptr_offset = integer_zero_node;
5451 return TREE_OPERAND (arg, 0);
5452 }
5453 else if (TREE_CODE (arg) == PLUS_EXPR)
5454 {
5455 tree arg0 = TREE_OPERAND (arg, 0);
5456 tree arg1 = TREE_OPERAND (arg, 1);
5457
5458 STRIP_NOPS (arg0);
5459 STRIP_NOPS (arg1);
5460
5461 if (TREE_CODE (arg0) == ADDR_EXPR
5462 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5463 {
5464 *ptr_offset = arg1;
5465 return TREE_OPERAND (arg0, 0);
5466 }
5467 else if (TREE_CODE (arg1) == ADDR_EXPR
5468 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5469 {
5470 *ptr_offset = arg0;
5471 return TREE_OPERAND (arg1, 0);
5472 }
5473 }
5474
5475 return 0;
5476}
5477
5478/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5479 way, because it could contain a zero byte in the middle.
5480 TREE_STRING_LENGTH is the size of the character array, not the string.
5481
5482 Unfortunately, string_constant can't access the values of const char
5483 arrays with initializers, so neither can we do so here. */
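/* Illustrative example (not from the original source): for the
   string constant "ab\0d", TREE_STRING_LENGTH is 5 -- the size of
   the character array including the appended null -- while the C
   string length computed here is 2.  */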
5484
5485static tree
5486c_strlen (src)
5487 tree src;
5488{
5489 tree offset_node;
5490 int offset, max;
5491 char *ptr;
5492
5493 src = string_constant (src, &offset_node);
5494 if (src == 0)
5495 return 0;
5496 max = TREE_STRING_LENGTH (src);
5497 ptr = TREE_STRING_POINTER (src);
5498 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5499 {
5500 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5501 compute the offset to the following null if we don't know where to
5502 start searching for it. */
5503 int i;
5504 for (i = 0; i < max; i++)
5505 if (ptr[i] == 0)
5506 return 0;
5507 /* We don't know the starting offset, but we do know that the string
5508 has no internal zero bytes. We can assume that the offset falls
5509 within the bounds of the string; otherwise, the programmer deserves
5510 what he gets. Subtract the offset from the length of the string,
5511 and return that. */
5512 /* This would perhaps not be valid if we were dealing with named
5513 arrays in addition to literal string constants. */
5514 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5515 }
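  /* Illustrative example (not from the original source): for
     `strlen (s + k)' where S is a constant string and K is not known
     at compile time, the loop above has verified that there is no
     internal zero byte, so the size_binop call above can express the
     length as the array size minus the offset.  */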
5516
5517 /* We have a known offset into the string. Start searching there for
5518 a null character. */
5519 if (offset_node == 0)
5520 offset = 0;
5521 else
5522 {
5523 /* Did we get a long long offset? If so, punt. */
5524 if (TREE_INT_CST_HIGH (offset_node) != 0)
5525 return 0;
5526 offset = TREE_INT_CST_LOW (offset_node);
5527 }
5528 /* If the offset is known to be out of bounds, warn, and call strlen at
5529 runtime. */
5530 if (offset < 0 || offset > max)
5531 {
5532 warning ("offset outside bounds of constant string");
5533 return 0;
5534 }
5535 /* Use strlen to search for the first zero byte. Since any strings
5536 constructed with build_string will have nulls appended, we win even
5537 if we get handed something like (char[4])"abcd".
5538
5539 Since OFFSET is our starting index into the string, no further
5540 calculation is needed. */
5541 return size_int (strlen (ptr + offset));
5542}
5543\f
5544/* Expand an expression EXP that calls a built-in function,
5545 with result going to TARGET if that's convenient
5546 (and in mode MODE if that's convenient).
5547 SUBTARGET may be used as the target for computing one of EXP's operands.
5548 IGNORE is nonzero if the value is to be ignored. */
5549
5550static rtx
5551expand_builtin (exp, target, subtarget, mode, ignore)
5552 tree exp;
5553 rtx target;
5554 rtx subtarget;
5555 enum machine_mode mode;
5556 int ignore;
5557{
5558 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5559 tree arglist = TREE_OPERAND (exp, 1);
5560 rtx op0;
5561 rtx lab1, insns;
5562 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
5563 optab builtin_optab;
5564
5565 switch (DECL_FUNCTION_CODE (fndecl))
5566 {
5567 case BUILT_IN_ABS:
5568 case BUILT_IN_LABS:
5569 case BUILT_IN_FABS:
5570 /* build_function_call changes these into ABS_EXPR. */
5571 abort ();
5572
5573 case BUILT_IN_SIN:
5574 case BUILT_IN_COS:
5575 case BUILT_IN_FSQRT:
5576 /* If not optimizing, call the library function. */
5577 if (! optimize)
5578 break;
5579
5580 if (arglist == 0
5581 /* Arg could be wrong type if user redeclared this fcn wrong. */
5582 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
5583 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
5584
5585 /* Stabilize and compute the argument. */
5586 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5587 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5588 {
5589 exp = copy_node (exp);
5590 arglist = copy_node (arglist);
5591 TREE_OPERAND (exp, 1) = arglist;
5592 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5593 }
5594 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5595
5596 /* Make a suitable register to place result in. */
5597 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5598
5599 emit_queue ();
5600 start_sequence ();
5601
5602 switch (DECL_FUNCTION_CODE (fndecl))
5603 {
5604 case BUILT_IN_SIN:
5605 builtin_optab = sin_optab; break;
5606 case BUILT_IN_COS:
5607 builtin_optab = cos_optab; break;
5608 case BUILT_IN_FSQRT:
5609 builtin_optab = sqrt_optab; break;
5610 default:
5611 abort ();
5612 }
5613
5614 /* Compute into TARGET.
5615 Set TARGET to wherever the result comes back. */
5616 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5617 builtin_optab, op0, target, 0);
5618
5619 /* If we were unable to expand via the builtin, stop the
5620 sequence (without outputting the insns) and break, causing
5621 a call to the library function. */
5622 if (target == 0)
5623 {
5624 end_sequence ();
5625 break;
5626 }
5627
5628 /* Check the results by default. But if flag_fast_math is turned on,
5629 then assume sqrt will always be called with valid arguments. */
5630
5631 if (! flag_fast_math)
5632 {
5633 /* Don't define the builtin FP instructions
5634 if your machine is not IEEE. */
5635 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5636 abort ();
5637
5638 lab1 = gen_label_rtx ();
5639
5640 /* Test the result; if it is NaN, set errno=EDOM because
5641 the argument was not in the domain. */
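	  /* A value compares equal to itself unless it is a NaN, so the
	     branch below is taken for every valid result and only a NaN
	     falls through to the errno-setting code.  */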
5642 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5643 emit_jump_insn (gen_beq (lab1));
5644
5645#if TARGET_EDOM
5646 {
5647#ifdef GEN_ERRNO_RTX
5648 rtx errno_rtx = GEN_ERRNO_RTX;
5649#else
5650 rtx errno_rtx
5651 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5652#endif
5653
5654 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5655 }
5656#else
5657 /* We can't set errno=EDOM directly; let the library call do it.
5658 Pop the arguments right away in case the call gets deleted. */
5659 NO_DEFER_POP;
5660 expand_call (exp, target, 0);
5661 OK_DEFER_POP;
5662#endif
5663
5664 emit_label (lab1);
5665 }
5666
5667 /* Output the entire sequence. */
5668 insns = get_insns ();
5669 end_sequence ();
5670 emit_insns (insns);
5671
5672 return target;
5673
5674 /* __builtin_apply_args returns block of memory allocated on
5675 the stack into which is stored the arg pointer, structure
5676 value address, static chain, and all the registers that might
5677 possibly be used in performing a function call. The code is
5678 moved to the start of the function so the incoming values are
5679 saved. */
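     /* Illustrative use of these builtins together (an example, not
	taken from this file), forwarding the incoming arguments to F:

	    void *args = __builtin_apply_args ();
	    void *result = __builtin_apply ((void (*)()) f, args, 64);
	    __builtin_return (result);

	where 64 is assumed to be a safe upper bound on the bytes of
	pushed arguments.  */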
5680 case BUILT_IN_APPLY_ARGS:
5681 /* Don't do __builtin_apply_args more than once in a function.
5682 Save the result of the first call and reuse it. */
5683 if (apply_args_value != 0)
5684 return apply_args_value;
5685 {
5686 /* When this function is called, it means that registers must be
5687 saved on entry to this function. So we migrate the
5688 call to the first insn of this function. */
5689 rtx temp;
5690 rtx seq;
5691
5692 start_sequence ();
5693 temp = expand_builtin_apply_args ();
5694 seq = get_insns ();
5695 end_sequence ();
5696
5697 apply_args_value = temp;
5698
5699 /* Put the sequence after the NOTE that starts the function.
5700 If this is inside a SEQUENCE, make the outer-level insn
5701 chain current, so the code is placed at the start of the
5702 function. */
5703 push_topmost_sequence ();
5704 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5705 pop_topmost_sequence ();
5706 return temp;
5707 }
5708
5709 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5710 FUNCTION with a copy of the parameters described by
5711 ARGUMENTS, and ARGSIZE. It returns a block of memory
5712 allocated on the stack into which is stored all the registers
5713 that might possibly be used for returning the result of a
5714 function. ARGUMENTS is the value returned by
5715 __builtin_apply_args. ARGSIZE is the number of bytes of
5716 arguments that must be copied. ??? How should this value be
5717 computed? We'll also need a safe worst case value for varargs
5718 functions. */
5719 case BUILT_IN_APPLY:
5720 if (arglist == 0
5721 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5722 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5723 || TREE_CHAIN (arglist) == 0
5724 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5725 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5726 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5727 return const0_rtx;
5728 else
5729 {
5730 int i;
5731 tree t;
5732 rtx ops[3];
5733
5734 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5735 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5736
5737 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5738 }
5739
5740 /* __builtin_return (RESULT) causes the function to return the
5741 value described by RESULT. RESULT is address of the block of
5742 memory returned by __builtin_apply. */
5743 case BUILT_IN_RETURN:
5744 if (arglist
5745 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5746 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5747 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5748 NULL_RTX, VOIDmode, 0));
5749 return const0_rtx;
5750
5751 case BUILT_IN_SAVEREGS:
5752 /* Don't do __builtin_saveregs more than once in a function.
5753 Save the result of the first call and reuse it. */
5754 if (saveregs_value != 0)
5755 return saveregs_value;
5756 {
5757 /* When this function is called, it means that registers must be
5758 saved on entry to this function. So we migrate the
5759 call to the first insn of this function. */
5760 rtx temp;
5761 rtx seq;
5762 rtx valreg, saved_valreg;
5763
5764 /* Now really call the function. `expand_call' does not call
5765 expand_builtin, so there is no danger of infinite recursion here. */
5766 start_sequence ();
5767
5768#ifdef EXPAND_BUILTIN_SAVEREGS
5769 /* Do whatever the machine needs done in this case. */
5770 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5771#else
5772 /* The register where the function returns its value
5773 is likely to have something else in it, such as an argument.
5774 So preserve that register around the call. */
5775 if (value_mode != VOIDmode)
5776 {
5777 valreg = hard_libcall_value (value_mode);
5778 saved_valreg = gen_reg_rtx (value_mode);
5779 emit_move_insn (saved_valreg, valreg);
5780 }
5781
5782 /* Generate the call, putting the value in a pseudo. */
5783 temp = expand_call (exp, target, ignore);
5784
5785 if (value_mode != VOIDmode)
5786 emit_move_insn (valreg, saved_valreg);
5787#endif
5788
5789 seq = get_insns ();
5790 end_sequence ();
5791
5792 saveregs_value = temp;
5793
5794 /* Put the sequence after the NOTE that starts the function.
5795 If this is inside a SEQUENCE, make the outer-level insn
5796 chain current, so the code is placed at the start of the
5797 function. */
5798 push_topmost_sequence ();
5799 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5800 pop_topmost_sequence ();
5801 return temp;
5802 }
5803
5804 /* __builtin_args_info (N) returns word N of the arg space info
5805 for the current function. The number and meanings of words
5806 is controlled by the definition of CUMULATIVE_ARGS. */
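    /* Illustrative example: `__builtin_args_info (0)' expands to a
       compile-time constant, the first word of this function's
       CUMULATIVE_ARGS record.  */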
5807 case BUILT_IN_ARGS_INFO:
5808 {
5809 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5810 int i;
5811 int *word_ptr = (int *) &current_function_args_info;
5812 tree type, elts, result;
5813
5814 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5815 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5816 __FILE__, __LINE__);
5817
5818 if (arglist != 0)
5819 {
5820 tree arg = TREE_VALUE (arglist);
5821 if (TREE_CODE (arg) != INTEGER_CST)
5822 error ("argument of `__builtin_args_info' must be constant");
5823 else
5824 {
5825 int wordnum = TREE_INT_CST_LOW (arg);
5826
5827 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5828 error ("argument of `__builtin_args_info' out of range");
5829 else
5830 return GEN_INT (word_ptr[wordnum]);
5831 }
5832 }
5833 else
5834 error ("missing argument in `__builtin_args_info'");
5835
5836 return const0_rtx;
5837
5838#if 0
5839 for (i = 0; i < nwords; i++)
5840 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5841
5842 type = build_array_type (integer_type_node,
5843 build_index_type (build_int_2 (nwords, 0)));
5844 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5845 TREE_CONSTANT (result) = 1;
5846 TREE_STATIC (result) = 1;
5847 result = build (INDIRECT_REF, build_pointer_type (type), result);
5848 TREE_CONSTANT (result) = 1;
5849 return expand_expr (result, NULL_RTX, VOIDmode, 0);
5850#endif
5851 }
5852
5853 /* Return the address of the first anonymous stack arg. */
5854 case BUILT_IN_NEXT_ARG:
5855 {
5856 tree fntype = TREE_TYPE (current_function_decl);
5857 if (!(TYPE_ARG_TYPES (fntype) != 0
5858 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5859 != void_type_node)))
5860 {
5861 error ("`va_start' used in function with fixed args");
5862 return const0_rtx;
5863 }
5864 }
5865
5866 return expand_binop (Pmode, add_optab,
5867 current_function_internal_arg_pointer,
5868 current_function_arg_offset_rtx,
5869 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5870
5871 case BUILT_IN_CLASSIFY_TYPE:
5872 if (arglist != 0)
5873 {
5874 tree type = TREE_TYPE (TREE_VALUE (arglist));
5875 enum tree_code code = TREE_CODE (type);
5876 if (code == VOID_TYPE)
5877 return GEN_INT (void_type_class);
5878 if (code == INTEGER_TYPE)
5879 return GEN_INT (integer_type_class);
5880 if (code == CHAR_TYPE)
5881 return GEN_INT (char_type_class);
5882 if (code == ENUMERAL_TYPE)
5883 return GEN_INT (enumeral_type_class);
5884 if (code == BOOLEAN_TYPE)
5885 return GEN_INT (boolean_type_class);
5886 if (code == POINTER_TYPE)
5887 return GEN_INT (pointer_type_class);
5888 if (code == REFERENCE_TYPE)
5889 return GEN_INT (reference_type_class);
5890 if (code == OFFSET_TYPE)
5891 return GEN_INT (offset_type_class);
5892 if (code == REAL_TYPE)
5893 return GEN_INT (real_type_class);
5894 if (code == COMPLEX_TYPE)
5895 return GEN_INT (complex_type_class);
5896 if (code == FUNCTION_TYPE)
5897 return GEN_INT (function_type_class);
5898 if (code == METHOD_TYPE)
5899 return GEN_INT (method_type_class);
5900 if (code == RECORD_TYPE)
5901 return GEN_INT (record_type_class);
5902 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
5903 return GEN_INT (union_type_class);
5904 if (code == ARRAY_TYPE)
5905 return GEN_INT (array_type_class);
5906 if (code == STRING_TYPE)
5907 return GEN_INT (string_type_class);
5908 if (code == SET_TYPE)
5909 return GEN_INT (set_type_class);
5910 if (code == FILE_TYPE)
5911 return GEN_INT (file_type_class);
5912 if (code == LANG_TYPE)
5913 return GEN_INT (lang_type_class);
5914 }
5915 return GEN_INT (no_type_class);
5916
5917 case BUILT_IN_CONSTANT_P:
5918 if (arglist == 0)
5919 return const0_rtx;
5920 else
5921 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5922 ? const1_rtx : const0_rtx);
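      /* E.g. `__builtin_constant_p (3)' yields 1, while the same call
	 on a variable yields 0, since only tree codes of class 'c'
	 (constants) are accepted here.  */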
5923
5924 case BUILT_IN_FRAME_ADDRESS:
5925 /* The argument must be a nonnegative integer constant.
5926 It counts the number of frames to scan up the stack.
5927 The value is the address of that frame. */
5928 case BUILT_IN_RETURN_ADDRESS:
5929 /* The argument must be a nonnegative integer constant.
5930 It counts the number of frames to scan up the stack.
5931 The value is the return address saved in that frame. */
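      /* Illustrative use: `__builtin_return_address (0)' yields the
	 return address of the current function, and
	 `__builtin_frame_address (1)' the frame address of its caller,
	 found by walking the dynamic chain in the loop below.  */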
5932 if (arglist == 0)
5933 /* Warning about missing arg was already issued. */
5934 return const0_rtx;
5935 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5936 {
5937 error ("invalid arg to `__builtin_return_address'");
5938 return const0_rtx;
5939 }
5940 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5941 {
5942 error ("invalid arg to `__builtin_return_address'");
5943 return const0_rtx;
5944 }
5945 else
5946 {
5947 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5948 rtx tem = frame_pointer_rtx;
5949 int i;
5950
5951 /* Some machines need special handling before we can access arbitrary
5952 frames. For example, on the sparc, we must first flush all
5953 register windows to the stack. */
5954#ifdef SETUP_FRAME_ADDRESSES
5955 SETUP_FRAME_ADDRESSES ();
5956#endif
5957
5958 /* On the sparc, the return address is not in the frame, it is
5959 in a register. There is no way to access it off of the current
5960 frame pointer, but it can be accessed off the previous frame
5961 pointer by reading the value from the register window save
5962 area. */
5963#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
5964 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
5965 count--;
5966#endif
5967
5968 /* Scan back COUNT frames to the specified frame. */
5969 for (i = 0; i < count; i++)
5970 {
5971 /* Assume the dynamic chain pointer is in the word that
5972 the frame address points to, unless otherwise specified. */
5973#ifdef DYNAMIC_CHAIN_ADDRESS
5974 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5975#endif
5976 tem = memory_address (Pmode, tem);
5977 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5978 }
5979
5980 /* For __builtin_frame_address, return what we've got. */
5981 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5982 return tem;
5983
5984 /* For __builtin_return_address,
5985 get the return address from that frame. */
5986#ifdef RETURN_ADDR_RTX
5987 return RETURN_ADDR_RTX (count, tem);
5988#else
5989 tem = memory_address (Pmode,
5990 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5991 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5992#endif
5993 }
5994
5995 case BUILT_IN_ALLOCA:
5996 if (arglist == 0
5997 /* Arg could be non-integer if user redeclared this fcn wrong. */
5998 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5999 return const0_rtx;
6000 current_function_calls_alloca = 1;
6001 /* Compute the argument. */
6002 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6003
6004 /* Allocate the desired space. */
6005 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6006
6007 /* Record the new stack level for nonlocal gotos. */
6008 if (nonlocal_goto_handler_slot != 0)
6009 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6010 return target;
6011
6012 case BUILT_IN_FFS:
6013 /* If not optimizing, call the library function. */
6014 if (!optimize)
6015 break;
6016
6017 if (arglist == 0
6018 /* Arg could be non-integer if user redeclared this fcn wrong. */
6019 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6020 return const0_rtx;
6021
6022 /* Compute the argument. */
6023 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6024 /* Compute ffs, into TARGET if possible.
6025 Set TARGET to wherever the result comes back. */
6026 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6027 ffs_optab, op0, target, 1);
6028 if (target == 0)
6029 abort ();
6030 return target;
6031
6032 case BUILT_IN_STRLEN:
6033 /* If not optimizing, call the library function. */
6034 if (!optimize)
6035 break;
6036
6037 if (arglist == 0
6038 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6039 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6040 return const0_rtx;
6041 else
6042 {
6043 tree src = TREE_VALUE (arglist);
6044 tree len = c_strlen (src);
6045
6046 int align
6047 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6048
6049 rtx result, src_rtx, char_rtx;
6050 enum machine_mode insn_mode = value_mode, char_mode;
6051 enum insn_code icode;
6052
6053 /* If the length is known, just return it. */
6054 if (len != 0)
6055 return expand_expr (len, target, mode, 0);
6056
6057 /* If SRC is not a pointer type, don't do this operation inline. */
6058 if (align == 0)
6059 break;
6060
6061 /* Call a function if we can't compute strlen in the right mode. */
6062
6063 while (insn_mode != VOIDmode)
6064 {
6065 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6066 if (icode != CODE_FOR_nothing)
6067 break;
6068
6069 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6070 }
6071 if (insn_mode == VOIDmode)
6072 break;
6073
6074 /* Make a place to write the result of the instruction. */
6075 result = target;
6076 if (! (result != 0
6077 && GET_CODE (result) == REG
6078 && GET_MODE (result) == insn_mode
6079 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6080 result = gen_reg_rtx (insn_mode);
6081
6082 /* Make sure the operands are acceptable to the predicates. */
6083
6084 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6085 result = gen_reg_rtx (insn_mode);
6086
6087 src_rtx = memory_address (BLKmode,
6088 expand_expr (src, NULL_RTX, Pmode,
6089 EXPAND_NORMAL));
6090 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6091 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6092
6093 char_rtx = const0_rtx;
6094 char_mode = insn_operand_mode[(int)icode][2];
6095 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6096 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6097
6098 emit_insn (GEN_FCN (icode) (result,
6099 gen_rtx (MEM, BLKmode, src_rtx),
6100 char_rtx, GEN_INT (align)));
6101
6102 /* Return the value in the proper mode for this function. */
6103 if (GET_MODE (result) == value_mode)
6104 return result;
6105 else if (target != 0)
6106 {
6107 convert_move (target, result, 0);
6108 return target;
6109 }
6110 else
6111 return convert_to_mode (value_mode, result, 0);
6112 }
6113
6114 case BUILT_IN_STRCPY:
6115 /* If not optimizing, call the library function. */
6116 if (!optimize)
6117 break;
6118
6119 if (arglist == 0
6120 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6121 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6122 || TREE_CHAIN (arglist) == 0
6123 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6124 return const0_rtx;
6125 else
6126 {
6127 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6128
6129 if (len == 0)
6130 break;
6131
6132 len = size_binop (PLUS_EXPR, len, integer_one_node);
6133
6134 chainon (arglist, build_tree_list (NULL_TREE, len));
6135 }
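      /* Illustrative example: `strcpy (d, "hi")' reaches this point
	 with a constant source, so the call is given a third argument
	 and handled below exactly like `memcpy (d, "hi", 3)'.  */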
6136
6137 /* Falls through to the memcpy case below. */
6138 case BUILT_IN_MEMCPY:
6139 /* If not optimizing, call the library function. */
6140 if (!optimize)
6141 break;
6142
6143 if (arglist == 0
6144 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6145 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6146 || TREE_CHAIN (arglist) == 0
6147 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6148 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6149 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6150 return const0_rtx;
6151 else
6152 {
6153 tree dest = TREE_VALUE (arglist);
6154 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6155 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6156
6157 int src_align
6158 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6159 int dest_align
6160 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6161 rtx dest_rtx, dest_mem, src_mem;
6162
6163 /* If either SRC or DEST is not a pointer type, don't do
6164 this operation in-line. */
6165 if (src_align == 0 || dest_align == 0)
6166 {
6167 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6168 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6169 break;
6170 }
6171
6172 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6173 dest_mem = gen_rtx (MEM, BLKmode,
6174 memory_address (BLKmode, dest_rtx));
6175 src_mem = gen_rtx (MEM, BLKmode,
6176 memory_address (BLKmode,
6177 expand_expr (src, NULL_RTX,
6178 Pmode,
6179 EXPAND_NORMAL)));
6180
6181 /* Copy word part most expediently. */
6182 emit_block_move (dest_mem, src_mem,
6183 expand_expr (len, NULL_RTX, VOIDmode, 0),
6184 MIN (src_align, dest_align));
6185 return dest_rtx;
6186 }
6187
6188/* These comparison functions need an instruction that returns an actual
6189 index. An ordinary compare that just sets the condition codes
6190 is not enough. */
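/* For example, `memcmp (a, b, n)' must yield a signed integer that is
   negative, zero, or positive; a compare-and-branch pattern only sets
   the condition codes and cannot deliver such a value in a register.  */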
6191#ifdef HAVE_cmpstrsi
6192 case BUILT_IN_STRCMP:
6193 /* If not optimizing, call the library function. */
6194 if (!optimize)
6195 break;
6196
6197 if (arglist == 0
6198 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6199 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6200 || TREE_CHAIN (arglist) == 0
6201 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6202 return const0_rtx;
6203 else if (!HAVE_cmpstrsi)
6204 break;
6205 {
6206 tree arg1 = TREE_VALUE (arglist);
6207 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6208 tree offset;
6209 tree len, len2;
6210
6211 len = c_strlen (arg1);
6212 if (len)
6213 len = size_binop (PLUS_EXPR, integer_one_node, len);
6214 len2 = c_strlen (arg2);
6215 if (len2)
6216 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6217
6218 /* If we don't have a constant length for the first, use the length
6219 of the second, if we know it. We don't require a constant for
6220 this case; some cost analysis could be done if both are available
6221 but neither is constant. For now, assume they're equally cheap.
6222
6223 If both strings have constant lengths, use the smaller. This
6224 could arise if optimization results in strcmp being called with
6225 two fixed strings, or if the code was machine-generated. We should
6226 add some code to the `memcmp' handler below to deal with such
6227 situations, someday. */
6228 if (!len || TREE_CODE (len) != INTEGER_CST)
6229 {
6230 if (len2)
6231 len = len2;
6232 else if (len == 0)
6233 break;
6234 }
6235 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6236 {
6237 if (tree_int_cst_lt (len2, len))
6238 len = len2;
6239 }
6240
6241 chainon (arglist, build_tree_list (NULL_TREE, len));
6242 }
6243
6244 /* Falls through to the memcmp case below. */
6245 case BUILT_IN_MEMCMP:
6246 /* If not optimizing, call the library function. */
6247 if (!optimize)
6248 break;
6249
6250 if (arglist == 0
6251 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6252 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6253 || TREE_CHAIN (arglist) == 0
6254 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6255 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6256 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6257 return const0_rtx;
6258 else if (!HAVE_cmpstrsi)
6259 break;
6260 {
6261 tree arg1 = TREE_VALUE (arglist);
6262 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6263 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6264 rtx result;
6265
6266 int arg1_align
6267 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6268 int arg2_align
6269 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6270 enum machine_mode insn_mode
6271 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6272
6273 /* If we don't have POINTER_TYPE, call the function. */
6274 if (arg1_align == 0 || arg2_align == 0)
6275 {
6276 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6277 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6278 break;
6279 }
6280
6281 /* Make a place to write the result of the instruction. */
6282 result = target;
6283 if (! (result != 0
6284 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6285 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6286 result = gen_reg_rtx (insn_mode);
6287
6288 emit_insn (gen_cmpstrsi (result,
6289 gen_rtx (MEM, BLKmode,
6290 expand_expr (arg1, NULL_RTX, Pmode,
6291 EXPAND_NORMAL)),
6292 gen_rtx (MEM, BLKmode,
6293 expand_expr (arg2, NULL_RTX, Pmode,
6294 EXPAND_NORMAL)),
6295 expand_expr (len, NULL_RTX, VOIDmode, 0),
6296 GEN_INT (MIN (arg1_align, arg2_align))));
6297
6298 /* Return the value in the proper mode for this function. */
6299 mode = TYPE_MODE (TREE_TYPE (exp));
6300 if (GET_MODE (result) == mode)
6301 return result;
6302 else if (target != 0)
6303 {
6304 convert_move (target, result, 0);
6305 return target;
6306 }
6307 else
6308 return convert_to_mode (mode, result, 0);
6309 }
6310#else
6311 case BUILT_IN_STRCMP:
6312 case BUILT_IN_MEMCMP:
6313 break;
6314#endif
6315
6316 default: /* just do library call, if unknown builtin */
6317 error ("built-in function `%s' not currently supported",
6318 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6319 }
6320
6321 /* The switch statement above can drop through to cause the function
6322 to be called normally. */
6323
6324 return expand_call (exp, target, ignore);
6325}
6326\f
6327/* Built-in functions to perform an untyped call and return. */
6328
6329/* For each register that may be used for calling a function, this
6330 gives a mode used to copy the register's value. VOIDmode indicates
6331 the register is not used for calling a function. If the machine
6332 has register windows, this gives only the outbound registers.
6333 INCOMING_REGNO gives the corresponding inbound register. */
6334static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6335
6336/* For each register that may be used for returning values, this gives
6337 a mode used to copy the register's value. VOIDmode indicates the
6338 register is not used for returning values. If the machine has
6339 register windows, this gives only the outbound registers.
6340 INCOMING_REGNO gives the corresponding inbound register. */
6341static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6342
6343/* Return the size required for the block returned by __builtin_apply_args,
6344 and initialize apply_args_mode. */
6345static int
6346apply_args_size ()
6347{
6348 static int size = -1;
6349 int align, regno;
6350 enum machine_mode mode;
6351
6352 /* The values computed by this function never change. */
6353 if (size < 0)
6354 {
6355 /* The first value is the incoming arg-pointer. */
6356 size = GET_MODE_SIZE (Pmode);
6357
6358 /* The second value is the structure value address unless this is
6359 passed as an "invisible" first argument. */
6360 if (struct_value_rtx)
6361 size += GET_MODE_SIZE (Pmode);
6362
6363 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6364 if (FUNCTION_ARG_REGNO_P (regno))
6365 {
6366 /* Search for the proper mode for copying this register's
6367 value. I'm not sure this is right, but it works so far. */
6368 enum machine_mode best_mode = VOIDmode;
6369
6370 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6371 mode != VOIDmode;
6372 mode = GET_MODE_WIDER_MODE (mode))
6373 if (HARD_REGNO_MODE_OK (regno, mode)
6374 && HARD_REGNO_NREGS (regno, mode) == 1)
6375 best_mode = mode;
6376
6377 if (best_mode == VOIDmode)
6378 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6379 mode != VOIDmode;
6380 mode = GET_MODE_WIDER_MODE (mode))
6381 if (HARD_REGNO_MODE_OK (regno, mode)
6382 && (mov_optab->handlers[(int) mode].insn_code
6383 != CODE_FOR_nothing))
6384 best_mode = mode;
6385
6386 mode = best_mode;
6387 if (mode == VOIDmode)
6388 abort ();
6389
6390 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6391 if (size % align != 0)
6392 size = CEIL (size, align) * align;
6393 size += GET_MODE_SIZE (mode);
6394 apply_args_mode[regno] = mode;
6395 }
6396 else
6397 apply_args_mode[regno] = VOIDmode;
6398 }
6399 return size;
6400}
6401
6402/* Return the size required for the block returned by __builtin_apply,
6403 and initialize apply_result_mode. */
6404static int
6405apply_result_size ()
6406{
6407 static int size = -1;
6408 int align, regno;
6409 enum machine_mode mode;
6410
6411 /* The values computed by this function never change. */
6412 if (size < 0)
6413 {
6414 size = 0;
6415
6416 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6417 if (FUNCTION_VALUE_REGNO_P (regno))
6418 {
6419 /* Search for the proper mode for copying this register's
6420 value. I'm not sure this is right, but it works so far. */
6421 enum machine_mode best_mode = VOIDmode;
6422
6423 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6424 mode != TImode;
6425 mode = GET_MODE_WIDER_MODE (mode))
6426 if (HARD_REGNO_MODE_OK (regno, mode))
6427 best_mode = mode;
6428
6429 if (best_mode == VOIDmode)
6430 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6431 mode != VOIDmode;
6432 mode = GET_MODE_WIDER_MODE (mode))
6433 if (HARD_REGNO_MODE_OK (regno, mode)
6434 && (mov_optab->handlers[(int) mode].insn_code
6435 != CODE_FOR_nothing))
6436 best_mode = mode;
6437
6438 mode = best_mode;
6439 if (mode == VOIDmode)
6440 abort ();
6441
6442 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6443 if (size % align != 0)
6444 size = CEIL (size, align) * align;
6445 size += GET_MODE_SIZE (mode);
6446 apply_result_mode[regno] = mode;
6447 }
6448 else
6449 apply_result_mode[regno] = VOIDmode;
6450
6451 /* Allow targets that use untyped_call and untyped_return to override
6452 the size so that machine-specific information can be stored here. */
6453#ifdef APPLY_RESULT_SIZE
6454 size = APPLY_RESULT_SIZE;
6455#endif
6456 }
6457 return size;
6458}
6459
6460#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6461/* Create a vector describing the result block RESULT. If SAVEP is true,
6462 the result block is used to save the values; otherwise it is used to
6463 restore the values. */
6464static rtx
6465result_vector (savep, result)
6466 int savep;
6467 rtx result;
6468{
6469 int regno, size, align, nelts;
6470 enum machine_mode mode;
6471 rtx reg, mem;
6472 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6473
6474 size = nelts = 0;
6475 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6476 if ((mode = apply_result_mode[regno]) != VOIDmode)
6477 {
6478 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6479 if (size % align != 0)
6480 size = CEIL (size, align) * align;
6481 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6482 mem = change_address (result, mode,
6483 plus_constant (XEXP (result, 0), size));
6484 savevec[nelts++] = (savep
6485 ? gen_rtx (SET, VOIDmode, mem, reg)
6486 : gen_rtx (SET, VOIDmode, reg, mem));
6487 size += GET_MODE_SIZE (mode);
6488 }
6489 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6490}
6491#endif /* HAVE_untyped_call or HAVE_untyped_return */
6492
6493
6494/* Save the state required to perform an untyped call with the same
6495 arguments as were passed to the current function. */
6496static rtx
6497expand_builtin_apply_args ()
6498{
6499 rtx registers;
6500 int size, align, regno;
6501 enum machine_mode mode;
6502
6503 /* Create a block where the arg-pointer, structure value address,
6504 and argument registers can be saved. */
6505 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6506
6507 /* Walk past the arg-pointer and structure value address. */
6508 size = GET_MODE_SIZE (Pmode);
6509 if (struct_value_rtx)
6510 size += GET_MODE_SIZE (Pmode);
6511
6512 /* Save each register used in calling a function to the block. */
6513 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6514 if ((mode = apply_args_mode[regno]) != VOIDmode)
6515 {
6516 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6517 if (size % align != 0)
6518 size = CEIL (size, align) * align;
6519 emit_move_insn (change_address (registers, mode,
6520 plus_constant (XEXP (registers, 0),
6521 size)),
6522 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6523 size += GET_MODE_SIZE (mode);
6524 }
6525
6526 /* Save the arg pointer to the block. */
6527 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6528 copy_to_reg (virtual_incoming_args_rtx));
6529 size = GET_MODE_SIZE (Pmode);
6530
6531 /* Save the structure value address unless this is passed as an
6532 "invisible" first argument. */
6533 if (struct_value_incoming_rtx)
6534 {
6535 emit_move_insn (change_address (registers, Pmode,
6536 plus_constant (XEXP (registers, 0),
6537 size)),
6538 copy_to_reg (struct_value_incoming_rtx));
6539 size += GET_MODE_SIZE (Pmode);
6540 }
6541
6542 /* Return the address of the block. */
6543 return copy_addr_to_reg (XEXP (registers, 0));
6544}
6545
6546/* Perform an untyped call and save the state required to perform an
6547 untyped return of whatever value was returned by the given function. */
6548static rtx
6549expand_builtin_apply (function, arguments, argsize)
6550 rtx function, arguments, argsize;
6551{
6552 int size, align, regno;
6553 enum machine_mode mode;
6554 rtx incoming_args, result, reg, dest, call_insn;
6555 rtx old_stack_level = 0;
6556 rtx use_insns = 0;
6557
6558 /* Create a block where the return registers can be saved. */
6559 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6560
6561 /* ??? The argsize value should be adjusted here. */
6562
6563 /* Fetch the arg pointer from the ARGUMENTS block. */
6564 incoming_args = gen_reg_rtx (Pmode);
6565 emit_move_insn (incoming_args,
6566 gen_rtx (MEM, Pmode, arguments));
6567#ifndef STACK_GROWS_DOWNWARD
6568 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6569 incoming_args, 0, OPTAB_LIB_WIDEN);
6570#endif
6571
6572 /* Perform postincrements before actually calling the function. */
6573 emit_queue ();
6574
6575 /* Push a new argument block and copy the arguments. */
6576 do_pending_stack_adjust ();
6577 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6578
6579 /* Push a block of memory onto the stack to store the memory arguments.
6580 Save the address in a register, and copy the memory arguments. ??? I
6581 haven't figured out how the calling convention macros affect this,
6582 but it's likely that the source and/or destination addresses in
6583 the block copy will need updating in machine specific ways. */
6584 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6585 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6586 gen_rtx (MEM, BLKmode, incoming_args),
6587 argsize,
6588 PARM_BOUNDARY / BITS_PER_UNIT);
6589
6590 /* Refer to the argument block. */
6591 apply_args_size ();
6592 arguments = gen_rtx (MEM, BLKmode, arguments);
6593
6594 /* Walk past the arg-pointer and structure value address. */
6595 size = GET_MODE_SIZE (Pmode);
6596 if (struct_value_rtx)
6597 size += GET_MODE_SIZE (Pmode);
6598
6599 /* Restore each of the registers previously saved. Make USE insns
6600 for each of these registers for use in making the call. */
6601 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6602 if ((mode = apply_args_mode[regno]) != VOIDmode)
6603 {
6604 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6605 if (size % align != 0)
6606 size = CEIL (size, align) * align;
6607 reg = gen_rtx (REG, mode, regno);
6608 emit_move_insn (reg,
6609 change_address (arguments, mode,
6610 plus_constant (XEXP (arguments, 0),
6611 size)));
6612
6613 push_to_sequence (use_insns);
6614 emit_insn (gen_rtx (USE, VOIDmode, reg));
6615 use_insns = get_insns ();
6616 end_sequence ();
6617 size += GET_MODE_SIZE (mode);
6618 }
6619
6620 /* Restore the structure value address unless this is passed as an
6621 "invisible" first argument. */
6622 size = GET_MODE_SIZE (Pmode);
6623 if (struct_value_rtx)
6624 {
6625 rtx value = gen_reg_rtx (Pmode);
6626 emit_move_insn (value,
6627 change_address (arguments, Pmode,
6628 plus_constant (XEXP (arguments, 0),
6629 size)));
6630 emit_move_insn (struct_value_rtx, value);
6631 if (GET_CODE (struct_value_rtx) == REG)
6632 {
6633 push_to_sequence (use_insns);
6634 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6635 use_insns = get_insns ();
6636 end_sequence ();
6637 }
6638 size += GET_MODE_SIZE (Pmode);
6639 }
6640
6641 /* All arguments and registers used for the call are set up by now! */
6642 function = prepare_call_address (function, NULL_TREE, &use_insns);
6643
6644 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
6645 and we don't want to load it into a register as an optimization,
6646 because prepare_call_address already did it if it should be done. */
6647 if (GET_CODE (function) != SYMBOL_REF)
6648 function = memory_address (FUNCTION_MODE, function);
6649
6650 /* Generate the actual call instruction and save the return value. */
6651#ifdef HAVE_untyped_call
6652 if (HAVE_untyped_call)
6653 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6654 result, result_vector (1, result)));
6655 else
6656#endif
6657#ifdef HAVE_call_value
6658 if (HAVE_call_value)
6659 {
6660 rtx valreg = 0;
6661
6662 /* Locate the unique return register. It is not possible to
6663 express a call that sets more than one return register using
6664 call_value; use untyped_call for that. In fact, untyped_call
6665 only needs to save the return registers in the given block. */
6666 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6667 if ((mode = apply_result_mode[regno]) != VOIDmode)
6668 {
6669 if (valreg)
6670 abort (); /* HAVE_untyped_call required. */
6671 valreg = gen_rtx (REG, mode, regno);
6672 }
6673
6674 emit_call_insn (gen_call_value (valreg,
6675 gen_rtx (MEM, FUNCTION_MODE, function),
6676 const0_rtx, NULL_RTX, const0_rtx));
6677
6678 emit_move_insn (change_address (result, GET_MODE (valreg),
6679 XEXP (result, 0)),
6680 valreg);
6681 }
6682 else
6683#endif
6684 abort ();
6685
6686 /* Find the CALL insn we just emitted and write the USE insns before it. */
6687 for (call_insn = get_last_insn ();
6688 call_insn && GET_CODE (call_insn) != CALL_INSN;
6689 call_insn = PREV_INSN (call_insn))
6690 ;
6691
6692 if (! call_insn)
6693 abort ();
6694
6695 /* Put the USE insns before the CALL. */
6696 emit_insns_before (use_insns, call_insn);
6697
6698 /* Restore the stack. */
6699 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6700
6701 /* Return the address of the result block. */
6702 return copy_addr_to_reg (XEXP (result, 0));
6703}
6704
6705/* Perform an untyped return. */
6706static void
6707expand_builtin_return (result)
6708 rtx result;
6709{
6710 int size, align, regno;
6711 enum machine_mode mode;
6712 rtx reg;
6713 rtx use_insns = 0;
6714
6715 apply_result_size ();
6716 result = gen_rtx (MEM, BLKmode, result);
6717
6718#ifdef HAVE_untyped_return
6719 if (HAVE_untyped_return)
6720 {
6721 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6722 emit_barrier ();
6723 return;
6724 }
6725#endif
6726
6727 /* Restore the return value and note that each value is used. */
6728 size = 0;
6729 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6730 if ((mode = apply_result_mode[regno]) != VOIDmode)
6731 {
6732 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6733 if (size % align != 0)
6734 size = CEIL (size, align) * align;
6735 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6736 emit_move_insn (reg,
6737 change_address (result, mode,
6738 plus_constant (XEXP (result, 0),
6739 size)));
6740
6741 push_to_sequence (use_insns);
6742 emit_insn (gen_rtx (USE, VOIDmode, reg));
6743 use_insns = get_insns ();
6744 end_sequence ();
6745 size += GET_MODE_SIZE (mode);
6746 }
6747
6748 /* Put the USE insns before the return. */
6749 emit_insns (use_insns);
6750
6751 /* Return whatever values were restored by jumping directly to the end
6752 of the function. */
6753 expand_null_return ();
6754}
6755\f
6756/* Expand code for a post- or pre- increment or decrement
6757 and return the RTX for the result.
6758 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
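/* Illustrative example (not from the original source): for `y = x++;'
   (POST == 1) the value returned is the old value of X and the store
   of X+1 may be queued; for `y = ++x;' (POST == 0) the incremented
   value itself is returned.  */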
6759
6760static rtx
6761expand_increment (exp, post)
6762 register tree exp;
6763 int post;
6764{
6765 register rtx op0, op1;
6766 register rtx temp, value;
6767 register tree incremented = TREE_OPERAND (exp, 0);
6768 optab this_optab = add_optab;
6769 int icode;
6770 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6771 int op0_is_copy = 0;
6772
6773 /* Stabilize any component ref that might need to be
6774 evaluated more than once below. */
6775 if (!post
6776 || TREE_CODE (incremented) == BIT_FIELD_REF
6777 || (TREE_CODE (incremented) == COMPONENT_REF
6778 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6779 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6780 incremented = stabilize_reference (incremented);
6781
6782 /* Compute the operands as RTX.
6783 Note whether OP0 is the actual lvalue or a copy of it:
6784 I believe it is a copy iff it is a register or subreg
6785 and insns were generated in computing it. */
6786
6787 temp = get_last_insn ();
6788 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6789
6790 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6791 in place but instead must do sign- or zero-extension during assignment,
6792 so we copy it into a new register and let the code below use it as
6793 a copy.
6794
6795 Note that we can safely modify this SUBREG since it is known not to be
6796 shared (it was made by the expand_expr call above). */
6797
6798 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6799 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6800
6801 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6802 && temp != get_last_insn ());
6803 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6804
6805 /* Decide whether incrementing or decrementing. */
6806 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6807 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6808 this_optab = sub_optab;
6809
6810 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6811 then we cannot just increment OP0. We must therefore contrive to
6812 increment the original value. Then, for postincrement, we can return
6813 OP0 since it is a copy of the old value. For preincrement, we want
6814 to always expand here, since this generates better or equivalent code. */
6815 if (!post || op0_is_copy)
6816 {
6817 /* This is the easiest way to increment the value wherever it is.
6818 Problems with multiple evaluation of INCREMENTED are prevented
6819 because either (1) it is a component_ref or preincrement,
6820 in which case it was stabilized above, or (2) it is an array_ref
6821 with constant index in an array in a register, which is
6822 safe to reevaluate. */
6823 tree newexp = build ((this_optab == add_optab
6824 ? PLUS_EXPR : MINUS_EXPR),
6825 TREE_TYPE (exp),
6826 incremented,
6827 TREE_OPERAND (exp, 1));
6828 temp = expand_assignment (incremented, newexp, ! post, 0);
6829 return post ? op0 : temp;
6830 }
6831
6832 /* Convert decrement by a constant into a negative increment. */
6833 if (this_optab == sub_optab
6834 && GET_CODE (op1) == CONST_INT)
6835 {
6836 op1 = GEN_INT (- INTVAL (op1));
6837 this_optab = add_optab;
6838 }
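  /* E.g. a decrement of a pointer to a 4-byte type (`--p') arrives as
     a subtraction of the constant 4 and is rewritten here as an
     addition of -4.  */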
6839
6840 if (post)
6841 {
6842 /* We have a true reference to the value in OP0.
6843 If there is an insn to add or subtract in this mode, queue it. */
6844
6845#if 0 /* Turned off to avoid making extra insn for indexed memref. */
6846 op0 = stabilize (op0);
6847#endif
6848
6849 icode = (int) this_optab->handlers[(int) mode].insn_code;
6850 if (icode != (int) CODE_FOR_nothing
6851 /* Make sure that OP0 is valid for operands 0 and 1
6852 of the insn we want to queue. */
6853 && (*insn_operand_predicate[icode][0]) (op0, mode)
6854 && (*insn_operand_predicate[icode][1]) (op0, mode))
6855 {
6856 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6857 op1 = force_reg (mode, op1);
6858
6859 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6860 }
6861 }
6862
6863 /* Preincrement, or we can't increment with one simple insn. */
6864 if (post)
6865 /* Save a copy of the value before inc or dec, to return it later. */
6866 temp = value = copy_to_reg (op0);
6867 else
6868 /* Arrange to return the incremented value. */
6869 /* Copy the rtx because expand_binop will protect from the queue,
6870 and the results of that would be invalid for us to return
6871 if our caller does emit_queue before using our result. */
6872 temp = copy_rtx (value = op0);
6873
6874 /* Increment however we can. */
6875 op1 = expand_binop (mode, this_optab, value, op1, op0,
6876 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6877 /* Make sure the value is stored into OP0. */
6878 if (op1 != op0)
6879 emit_move_insn (op0, op1);
6880
6881 return temp;
6882}
6883\f
6884/* Expand all function calls contained within EXP, innermost ones first.
6885 But don't look within expressions that have sequence points.
6886 For each CALL_EXPR, record the rtx for its value
6887 in the CALL_EXPR_RTL field. */
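/* Illustrative example: given `a + f (b)', the call to F is expanded
   here first and its value recorded in CALL_EXPR_RTL, so that no
   partial result for the sum has to be kept live across the call.  */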
6888
6889static void
6890preexpand_calls (exp)
6891 tree exp;
6892{
6893 register int nops, i;
6894 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6895
6896 if (! do_preexpand_calls)
6897 return;
6898
6899 /* Only expressions and references can contain calls. */
6900
6901 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6902 return;
6903
6904 switch (TREE_CODE (exp))
6905 {
6906 case CALL_EXPR:
6907 /* Do nothing if already expanded. */
6908 if (CALL_EXPR_RTL (exp) != 0)
6909 return;
6910
6911 /* Do nothing to built-in functions. */
6912 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6913 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6914 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6915 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6916 return;
6917
6918 case COMPOUND_EXPR:
6919 case COND_EXPR:
6920 case TRUTH_ANDIF_EXPR:
6921 case TRUTH_ORIF_EXPR:
6922 /* If we find one of these, then we can be sure
6923 the adjust will be done for it (since it makes jumps).
6924 Do it now, so that if this is inside an argument
6925 of a function, we don't get the stack adjustment
6926 after some other args have already been pushed. */
6927 do_pending_stack_adjust ();
6928 return;
6929
6930 case BLOCK:
6931 case RTL_EXPR:
6932 case WITH_CLEANUP_EXPR:
6933 return;
6934
6935 case SAVE_EXPR:
6936 if (SAVE_EXPR_RTL (exp) != 0)
6937 return;
6938 }
6939
6940 nops = tree_code_length[(int) TREE_CODE (exp)];
6941 for (i = 0; i < nops; i++)
6942 if (TREE_OPERAND (exp, i) != 0)
6943 {
6944 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6945 if (type == 'e' || type == '<' || type == '1' || type == '2'
6946 || type == 'r')
6947 preexpand_calls (TREE_OPERAND (exp, i));
6948 }
6949}
6950\f
6951/* At the start of a function, record that we have no previously-pushed
6952 arguments waiting to be popped. */
6953
6954void
6955init_pending_stack_adjust ()
6956{
6957 pending_stack_adjust = 0;
6958}
6959
6960/* When exiting from a function, if safe, clear out any pending stack adjust
6961 so the adjustment won't get done. */
6962
6963void
6964clear_pending_stack_adjust ()
6965{
6966#ifdef EXIT_IGNORE_STACK
6967 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
6968 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
6969 && ! flag_inline_functions)
6970 pending_stack_adjust = 0;
6971#endif
6972}
6973
6974/* Pop any previously-pushed arguments that have not been popped yet. */
6975
6976void
6977do_pending_stack_adjust ()
6978{
6979 if (inhibit_defer_pop == 0)
6980 {
6981 if (pending_stack_adjust != 0)
6982 adjust_stack (GEN_INT (pending_stack_adjust));
6983 pending_stack_adjust = 0;
6984 }
6985}
6986
6987/* Expand all cleanups up to OLD_CLEANUPS.
6988 Needed here, and also for language-dependent calls. */
6989
6990void
6991expand_cleanups_to (old_cleanups)
6992 tree old_cleanups;
6993{
6994 while (cleanups_this_call != old_cleanups)
6995 {
6996 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
6997 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6998 }
6999}
7000\f
7001/* Expand conditional expressions. */
7002
7003/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7004 LABEL is an rtx of code CODE_LABEL, in this function and all the
7005 functions here. */
7006
7007void
7008jumpifnot (exp, label)
7009 tree exp;
7010 rtx label;
7011{
7012 do_jump (exp, label, NULL_RTX);
7013}
7014
7015/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7016
7017void
7018jumpif (exp, label)
7019 tree exp;
7020 rtx label;
7021{
7022 do_jump (exp, NULL_RTX, label);
7023}
7024
7025/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7026 the result is zero, or IF_TRUE_LABEL if the result is one.
7027 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7028 meaning fall through in that case.
7029
7030 do_jump always does any pending stack adjust except when it does not
7031 actually perform a jump. An example where there is no jump
7032 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7033
7034 This function is responsible for optimizing cases such as
7035 &&, || and comparison operators in EXP. */
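/* For example, `if (a && b)' arrives here as a TRUTH_ANDIF_EXPR and is
   expanded into straight-line jumps, roughly

	compute A; if zero, jump to the false label
	compute B; if zero, jump to the false label

   so no boolean value is ever materialized in a register.  */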
7036
7037void
7038do_jump (exp, if_false_label, if_true_label)
7039 tree exp;
7040 rtx if_false_label, if_true_label;
7041{
7042 register enum tree_code code = TREE_CODE (exp);
7043 /* Some cases need to create a label to jump to
7044 in order to properly fall through.
7045 These cases set DROP_THROUGH_LABEL nonzero. */
7046 rtx drop_through_label = 0;
7047 rtx temp;
7048 rtx comparison = 0;
7049 int i;
7050 tree type;
7051
7052 emit_queue ();
7053
7054 switch (code)
7055 {
7056 case ERROR_MARK:
7057 break;
7058
7059 case INTEGER_CST:
7060 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7061 if (temp)
7062 emit_jump (temp);
7063 break;
7064
7065#if 0
7066 /* This is not true with #pragma weak */
7067 case ADDR_EXPR:
7068 /* The address of something can never be zero. */
7069 if (if_true_label)
7070 emit_jump (if_true_label);
7071 break;
7072#endif
7073
7074 case NOP_EXPR:
7075 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7076 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7077 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7078 goto normal;
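      /* Otherwise fall through; a NOP_EXPR is treated just like
         CONVERT_EXPR.  */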
7079 case CONVERT_EXPR:
7080 /* If we are narrowing the operand, we have to do the compare in the
7081 narrower mode. */
7082 if ((TYPE_PRECISION (TREE_TYPE (exp))
7083 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7084 goto normal;
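      /* Otherwise fall through: a non-narrowing conversion cannot
         change whether the value is zero.  */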
7085 case NON_LVALUE_EXPR:
7086 case REFERENCE_EXPR:
7087 case ABS_EXPR:
7088 case NEGATE_EXPR:
7089 case LROTATE_EXPR:
7090 case RROTATE_EXPR:
7091 /* These cannot change zero->non-zero or vice versa. */
7092 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7093 break;
7094
7095#if 0
7096 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
7097 a test, and can be longer if the test is eliminated. */
7098 case PLUS_EXPR:
7099 /* Reduce to minus. */
7100 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7101 TREE_OPERAND (exp, 0),
7102 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7103 TREE_OPERAND (exp, 1))));
7104 /* Process as MINUS. */
7105#endif
7106
7107 case MINUS_EXPR:
7108 /* Non-zero iff operands of minus differ. */
7109 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7110 TREE_OPERAND (exp, 0),
7111 TREE_OPERAND (exp, 1)),
7112 NE, NE);
7113 break;
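      /* Hence `if (a - b)' is tested as `if (a != b)', and the
         subtraction itself is never emitted.  */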
7114
7115 case BIT_AND_EXPR:
7116 /* If we are AND'ing with a small constant, do this comparison in the
7117 smallest type that fits. If the machine doesn't have comparisons
7118 that small, it will be converted back to the wider comparison.
7119 This helps if we are testing the sign bit of a narrower object.
7120 combine can't do this for us because it can't know whether a
7121 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7122
7123 if (! SLOW_BYTE_ACCESS
7124 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7125 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7126 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7127 && (type = type_for_size (i + 1, 1)) != 0
7128 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7129 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7130 != CODE_FOR_nothing))
7131 {
7132 do_jump (convert (type, exp), if_false_label, if_true_label);
7133 break;
7134 }
7135 goto normal;
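      /* E.g., `if (i & 0x80)' with a 32-bit int can be tested as a
         QImode comparison when the machine has a byte-wide compare,
         since bit 7 fits in the 8-bit type chosen by type_for_size.  */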
7136
7137 case TRUTH_NOT_EXPR:
7138 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7139 break;
7140
7141 case TRUTH_ANDIF_EXPR:
7142 if (if_false_label == 0)
7143 if_false_label = drop_through_label = gen_label_rtx ();
7144 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7145 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7146 break;
7147
7148 case TRUTH_ORIF_EXPR:
7149 if (if_true_label == 0)
7150 if_true_label = drop_through_label = gen_label_rtx ();
7151 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7152 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7153 break;
7154
7155 case COMPOUND_EXPR:
7156 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7157 free_temp_slots ();
7158 emit_queue ();
7159 do_pending_stack_adjust ();
7160 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7161 break;
7162
7163 case COMPONENT_REF:
7164 case BIT_FIELD_REF:
7165 case ARRAY_REF:
7166 {
7167 int bitsize, bitpos, unsignedp;
7168 enum machine_mode mode;
7169 tree type;
7170 tree offset;
7171 int volatilep = 0;
7172
7173 /* Get description of this reference. We don't actually care
7174 about the underlying object here. */
7175 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7176 &mode, &unsignedp, &volatilep);
7177
7178 type = type_for_size (bitsize, unsignedp);
7179 if (! SLOW_BYTE_ACCESS
7180 && type != 0 && bitsize >= 0
7181 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7182 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7183 != CODE_FOR_nothing))
7184 {
7185 do_jump (convert (type, exp), if_false_label, if_true_label);
7186 break;
7187 }
7188 goto normal;
7189 }
7190
7191 case COND_EXPR:
7192 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7193 if (integer_onep (TREE_OPERAND (exp, 1))
7194 && integer_zerop (TREE_OPERAND (exp, 2)))
7195 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7196
7197 else if (integer_zerop (TREE_OPERAND (exp, 1))
7198 && integer_onep (TREE_OPERAND (exp, 2)))
7199 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7200
7201 else
7202 {
7203 register rtx label1 = gen_label_rtx ();
7204 drop_through_label = gen_label_rtx ();
7205 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7206 /* Now the THEN-expression. */
7207 do_jump (TREE_OPERAND (exp, 1),
7208 if_false_label ? if_false_label : drop_through_label,
7209 if_true_label ? if_true_label : drop_through_label);
7210 /* In case the do_jump just above never jumps. */
7211 do_pending_stack_adjust ();
7212 emit_label (label1);
7213 /* Now the ELSE-expression. */
7214 do_jump (TREE_OPERAND (exp, 2),
7215 if_false_label ? if_false_label : drop_through_label,
7216 if_true_label ? if_true_label : drop_through_label);
7217 }
7218 break;
7219
7220 case EQ_EXPR:
7221 if (integer_zerop (TREE_OPERAND (exp, 1)))
7222 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7223 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7224 == MODE_INT)
7225 &&
7226 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7227 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7228 else
7229 comparison = compare (exp, EQ, EQ);
7230 break;
7231
7232 case NE_EXPR:
7233 if (integer_zerop (TREE_OPERAND (exp, 1)))
7234 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7235 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7236 == MODE_INT)
7237 &&
7238 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7239 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7240 else
7241 comparison = compare (exp, NE, NE);
7242 break;
7243
7244 case LT_EXPR:
7245 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7246 == MODE_INT)
7247 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7248 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7249 else
7250 comparison = compare (exp, LT, LTU);
7251 break;
7252
7253 case LE_EXPR:
7254 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7255 == MODE_INT)
7256 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7257 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7258 else
7259 comparison = compare (exp, LE, LEU);
7260 break;
7261
7262 case GT_EXPR:
7263 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7264 == MODE_INT)
7265 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7266 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7267 else
7268 comparison = compare (exp, GT, GTU);
7269 break;
7270
7271 case GE_EXPR:
7272 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7273 == MODE_INT)
7274 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7275 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7276 else
7277 comparison = compare (exp, GE, GEU);
7278 break;
7279
7280 default:
7281 normal:
7282 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7283#if 0
7284 /* This is not needed any more and causes poor code since it causes
7285 comparisons and tests from non-SI objects to have different code
7286 sequences. */
7287 /* Copy to register to avoid generating bad insns by cse
7288 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7289 if (!cse_not_expected && GET_CODE (temp) == MEM)
7290 temp = copy_to_reg (temp);
7291#endif
7292 do_pending_stack_adjust ();
7293 if (GET_CODE (temp) == CONST_INT)
7294 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7295 else if (GET_CODE (temp) == LABEL_REF)
7296 comparison = const_true_rtx;
7297 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7298 && !can_compare_p (GET_MODE (temp)))
7299 /* Note swapping the labels gives us not-equal. */
7300 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7301 else if (GET_MODE (temp) != VOIDmode)
7302 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7303 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7304 GET_MODE (temp), NULL_RTX, 0);
7305 else
7306 abort ();
7307 }
7308
7309 /* Do any postincrements in the expression that was tested. */
7310 emit_queue ();
7311
7312 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7313 straight into a conditional jump instruction as the jump condition.
7314 Otherwise, all the work has been done already. */
7315
7316 if (comparison == const_true_rtx)
7317 {
7318 if (if_true_label)
7319 emit_jump (if_true_label);
7320 }
7321 else if (comparison == const0_rtx)
7322 {
7323 if (if_false_label)
7324 emit_jump (if_false_label);
7325 }
7326 else if (comparison)
7327 do_jump_for_compare (comparison, if_false_label, if_true_label);
7328
7329 free_temp_slots ();
7330
7331 if (drop_through_label)
7332 {
7333 /* If do_jump produces code that might be jumped around,
7334 do any stack adjusts from that code, before the place
7335 where control merges in. */
7336 do_pending_stack_adjust ();
7337 emit_label (drop_through_label);
7338 }
7339}
7340\f
7341/* Given a comparison expression EXP for values too wide to be compared
7342 with one insn, test the comparison and jump to the appropriate label.
7343 The code of EXP is ignored; we always test GT if SWAP is 0,
7344 and LT if SWAP is 1. */
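/* E.g., a DImode comparison on a 32-bit machine tests the high-order
   words first (signed, unless the operands are unsigned) and goes on
   to an unsigned test of the low-order words only when the high-order
   words compare equal.  */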
7345
7346static void
7347do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7348 tree exp;
7349 int swap;
7350 rtx if_false_label, if_true_label;
7351{
7352 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7353 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7354 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7355 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7356 rtx drop_through_label = 0;
7357 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7358 int i;
7359
7360 if (! if_true_label || ! if_false_label)
7361 drop_through_label = gen_label_rtx ();
7362 if (! if_true_label)
7363 if_true_label = drop_through_label;
7364 if (! if_false_label)
7365 if_false_label = drop_through_label;
7366
7367 /* Compare a word at a time, high order first. */
7368 for (i = 0; i < nwords; i++)
7369 {
7370 rtx comp;
7371 rtx op0_word, op1_word;
7372
7373 if (WORDS_BIG_ENDIAN)
7374 {
7375 op0_word = operand_subword_force (op0, i, mode);
7376 op1_word = operand_subword_force (op1, i, mode);
7377 }
7378 else
7379 {
7380 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7381 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7382 }
7383
7384 /* All but high-order word must be compared as unsigned. */
7385 comp = compare_from_rtx (op0_word, op1_word,
7386 (unsignedp || i > 0) ? GTU : GT,
7387 unsignedp, word_mode, NULL_RTX, 0);
7388 if (comp == const_true_rtx)
7389 emit_jump (if_true_label);
7390 else if (comp != const0_rtx)
7391 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7392
7393 /* Consider lower words only if these are equal. */
7394 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7395 NULL_RTX, 0);
7396 if (comp == const_true_rtx)
7397 emit_jump (if_false_label);
7398 else if (comp != const0_rtx)
7399 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7400 }
7401
7402 if (if_false_label)
7403 emit_jump (if_false_label);
7404 if (drop_through_label)
7405 emit_label (drop_through_label);
7406}
7407
7408/* Compare OP0 with OP1, word at a time, in mode MODE.
7409 UNSIGNEDP says to do unsigned comparison.
7410 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7411
7412static void
7413do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7414 enum machine_mode mode;
7415 int unsignedp;
7416 rtx op0, op1;
7417 rtx if_false_label, if_true_label;
7418{
7419 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7420 rtx drop_through_label = 0;
7421 int i;
7422
7423 if (! if_true_label || ! if_false_label)
7424 drop_through_label = gen_label_rtx ();
7425 if (! if_true_label)
7426 if_true_label = drop_through_label;
7427 if (! if_false_label)
7428 if_false_label = drop_through_label;
7429
7430 /* Compare a word at a time, high order first. */
7431 for (i = 0; i < nwords; i++)
7432 {
7433 rtx comp;
7434 rtx op0_word, op1_word;
7435
7436 if (WORDS_BIG_ENDIAN)
7437 {
7438 op0_word = operand_subword_force (op0, i, mode);
7439 op1_word = operand_subword_force (op1, i, mode);
7440 }
7441 else
7442 {
7443 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7444 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7445 }
7446
7447 /* All but high-order word must be compared as unsigned. */
7448 comp = compare_from_rtx (op0_word, op1_word,
7449 (unsignedp || i > 0) ? GTU : GT,
7450 unsignedp, word_mode, NULL_RTX, 0);
7451 if (comp == const_true_rtx)
7452 emit_jump (if_true_label);
7453 else if (comp != const0_rtx)
7454 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7455
7456 /* Consider lower words only if these are equal. */
7457 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7458 NULL_RTX, 0);
7459 if (comp == const_true_rtx)
7460 emit_jump (if_false_label);
7461 else if (comp != const0_rtx)
7462 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7463 }
7464
7465 if (if_false_label)
7466 emit_jump (if_false_label);
7467 if (drop_through_label)
7468 emit_label (drop_through_label);
7469}
7470
7471/* Given an EQ_EXPR expression EXP for values too wide to be compared
7472 with one insn, test the comparison and jump to the appropriate label. */
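/* Each pair of words is compared in turn; any mismatch jumps to the
   false label at once, and only if every pair matches does control
   reach the jump to the true label after the loop.  */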
7473
7474static void
7475do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7476 tree exp;
7477 rtx if_false_label, if_true_label;
7478{
7479 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7480 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7481 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7482 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7483 int i;
7484 rtx drop_through_label = 0;
7485
7486 if (! if_false_label)
7487 drop_through_label = if_false_label = gen_label_rtx ();
7488
7489 for (i = 0; i < nwords; i++)
7490 {
7491 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7492 operand_subword_force (op1, i, mode),
7493 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7494 word_mode, NULL_RTX, 0);
7495 if (comp == const0_rtx)
7496 emit_jump (if_false_label);
7497 else if (comp != const_true_rtx)
7498 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7499 }
7500
7501 if (if_true_label)
7502 emit_jump (if_true_label);
7503 if (drop_through_label)
7504 emit_label (drop_through_label);
7505}
7506\f
7507/* Jump according to whether OP0 is 0.
7508 We assume that OP0 has an integer mode that is too wide
7509 for the available compare insns. */
7510
7511static void
7512do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7513 rtx op0;
7514 rtx if_false_label, if_true_label;
7515{
7516 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7517 int i;
7518 rtx drop_through_label = 0;
7519
7520 if (! if_false_label)
7521 drop_through_label = if_false_label = gen_label_rtx ();
7522
7523 for (i = 0; i < nwords; i++)
7524 {
7525 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7526 GET_MODE (op0)),
7527 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7528 if (comp == const0_rtx)
7529 emit_jump (if_false_label);
7530 else if (comp != const_true_rtx)
7531 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7532 }
7533
7534 if (if_true_label)
7535 emit_jump (if_true_label);
7536 if (drop_through_label)
7537 emit_label (drop_through_label);
7538}
7539
7540/* Given a comparison expression in rtl form, output conditional branches to
7541 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7542
7543static void
7544do_jump_for_compare (comparison, if_false_label, if_true_label)
7545 rtx comparison, if_false_label, if_true_label;
7546{
7547 if (if_true_label)
7548 {
7549 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7550 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7551 else
7552 abort ();
7553
7554 if (if_false_label)
7555 emit_jump (if_false_label);
7556 }
7557 else if (if_false_label)
7558 {
7559 rtx insn;
7560 rtx prev = PREV_INSN (get_last_insn ());
7561 rtx branch = 0;
7562
7563 /* Output the branch with the opposite condition. Then try to invert
7564 what is generated. If more than one insn is a branch, or if the
7565 branch is not the last insn written, abort. If we can't invert
7566 the branch, make a true label, redirect this jump to that,
7567 emit a jump to the false label and define the true label. */
7568
7569 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7570 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7571 else
7572 abort ();
7573
7574 /* Here we get the insn before what was just emitted.
7575 On some machines, emitting the branch can discard
7576 the previous compare insn and emit a replacement. */
7577 if (prev == 0)
7578 /* If there's only one preceding insn... */
7579 insn = get_insns ();
7580 else
7581 insn = NEXT_INSN (prev);
7582
7583 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7584 if (GET_CODE (insn) == JUMP_INSN)
7585 {
7586 if (branch)
7587 abort ();
7588 branch = insn;
7589 }
7590
7591 if (branch != get_last_insn ())
7592 abort ();
7593
7594 if (! invert_jump (branch, if_false_label))
7595 {
7596 if_true_label = gen_label_rtx ();
7597 redirect_jump (branch, if_true_label);
7598 emit_jump (if_false_label);
7599 emit_label (if_true_label);
7600 }
7601 }
7602}
7603\f
7604/* Generate code for a comparison expression EXP
7605 (including code to compute the values to be compared)
7606 and set (CC0) according to the result.
7607 SIGNED_CODE should be the rtx operation for this comparison for
7608 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7609
7610 We force a stack adjustment unless there are currently
7611 things pushed on the stack that aren't yet used. */
7612
7613static rtx
7614compare (exp, signed_code, unsigned_code)
7615 register tree exp;
7616 enum rtx_code signed_code, unsigned_code;
7617{
7618 register rtx op0
7619 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7620 register rtx op1
7621 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7622 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7623 register enum machine_mode mode = TYPE_MODE (type);
7624 int unsignedp = TREE_UNSIGNED (type);
7625 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7626
7627 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7628 ((mode == BLKmode)
7629 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7630 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7631}
7632
7633/* Like compare but expects the values to compare as two rtx's.
7634 The decision as to signed or unsigned comparison must be made by the caller.
7635
7636 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7637 compared.
7638
7639 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7640 size of MODE should be used. */
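/* For example, a caller comparing two SImode registers for `>' passes
   GT; after the cmp insn is emitted, this returns roughly
   (gt (cc0) (const_int 0)), ready to be used as a branch condition.  */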
7641
7642rtx
7643compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7644 register rtx op0, op1;
7645 enum rtx_code code;
7646 int unsignedp;
7647 enum machine_mode mode;
7648 rtx size;
7649 int align;
7650{
7651 rtx tem;
7652
7653 /* If one operand is constant, make it the second one. Only do this
7654 if the other operand is not constant as well. */
7655
7656 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7657 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7658 {
7659 tem = op0;
7660 op0 = op1;
7661 op1 = tem;
7662 code = swap_condition (code);
7663 }
7664
7665 if (flag_force_mem)
7666 {
7667 op0 = force_not_mem (op0);
7668 op1 = force_not_mem (op1);
7669 }
7670
7671 do_pending_stack_adjust ();
7672
7673 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7674 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7675 return tem;
7676
7677#if 0
7678 /* There's no need to do this now that combine.c can eliminate lots of
7679 sign extensions. This can be less efficient in certain cases on other
7680 machines. */
7681
7682 /* If this is a signed equality comparison, we can do it as an
7683 unsigned comparison since zero-extension is cheaper than sign
7684 extension and comparisons with zero are done as unsigned. This is
7685 the case even on machines that can do fast sign extension, since
7686 zero-extension is easier to combine with other operations than
7687 sign-extension is. If we are comparing against a constant, we must
7688 convert it to what it would look like unsigned. */
7689 if ((code == EQ || code == NE) && ! unsignedp
7690 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
7691 {
7692 if (GET_CODE (op1) == CONST_INT
7693 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7694 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7695 unsignedp = 1;
7696 }
7697#endif
7698
7699 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7700
7701 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7702}
7703\f
7704/* Generate code to calculate EXP using a store-flag instruction
7705 and return an rtx for the result. EXP is either a comparison
7706 or a TRUTH_NOT_EXPR whose operand is a comparison.
7707
7708 If TARGET is nonzero, store the result there if convenient.
7709
7710 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7711 cheap.
7712
7713 Return zero if there is no suitable set-flag instruction
7714 available on this machine.
7715
7716 Once expand_expr has been called on the arguments of the comparison,
7717 we are committed to doing the store flag, since it is not safe to
7718 re-evaluate the expression. We emit the store-flag insn by calling
7719 emit_store_flag, but only expand the arguments if we have a reason
7720 to believe that emit_store_flag will be successful. If we think that
7721 it will, but it isn't, we have to simulate the store-flag with a
7722 set/jump/set sequence. */
7723
7724static rtx
7725do_store_flag (exp, target, mode, only_cheap)
7726 tree exp;
7727 rtx target;
7728 enum machine_mode mode;
7729 int only_cheap;
7730{
7731 enum rtx_code code;
7732 tree arg0, arg1, type;
7733 tree tem;
7734 enum machine_mode operand_mode;
7735 int invert = 0;
7736 int unsignedp;
7737 rtx op0, op1;
7738 enum insn_code icode;
7739 rtx subtarget = target;
7740 rtx result, label, pattern, jump_pat;
7741
7742 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7743 result at the end. We can't simply invert the test since it would
7744 have already been inverted if it were valid. This case occurs for
7745 some floating-point comparisons. */
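/* (E.g., on IEEE machines `!(x > y)' is not the same as `x <= y' when
   an operand is a NaN, so the inverted comparison may not exist.)  */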
7746
7747 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7748 invert = 1, exp = TREE_OPERAND (exp, 0);
7749
7750 arg0 = TREE_OPERAND (exp, 0);
7751 arg1 = TREE_OPERAND (exp, 1);
7752 type = TREE_TYPE (arg0);
7753 operand_mode = TYPE_MODE (type);
7754 unsignedp = TREE_UNSIGNED (type);
7755
7756 /* We won't bother with BLKmode store-flag operations because it would mean
7757 passing a lot of information to emit_store_flag. */
7758 if (operand_mode == BLKmode)
7759 return 0;
7760
7761 STRIP_NOPS (arg0);
7762 STRIP_NOPS (arg1);
7763
7764 /* Get the rtx comparison code to use. We know that EXP is a comparison
7765 operation of some type. Some comparisons against 1 and -1 can be
7766 converted to comparisons with zero. Do so here so that the tests
7767 below will be aware that we have a comparison with zero. These
7768 tests will not catch constants in the first operand, but constants
7769 are rarely passed as the first operand. */
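/* For instance, signed `x < 1' becomes `x <= 0' and `x >= 1' becomes
   `x > 0', so the zero-comparison special cases below apply to them
   as well.  */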
7770
7771 switch (TREE_CODE (exp))
7772 {
7773 case EQ_EXPR:
7774 code = EQ;
7775 break;
7776 case NE_EXPR:
7777 code = NE;
7778 break;
7779 case LT_EXPR:
7780 if (integer_onep (arg1))
7781 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7782 else
7783 code = unsignedp ? LTU : LT;
7784 break;
7785 case LE_EXPR:
7786 if (! unsignedp && integer_all_onesp (arg1))
7787 arg1 = integer_zero_node, code = LT;
7788 else
7789 code = unsignedp ? LEU : LE;
7790 break;
7791 case GT_EXPR:
7792 if (! unsignedp && integer_all_onesp (arg1))
7793 arg1 = integer_zero_node, code = GE;
7794 else
7795 code = unsignedp ? GTU : GT;
7796 break;
7797 case GE_EXPR:
7798 if (integer_onep (arg1))
7799 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7800 else
7801 code = unsignedp ? GEU : GE;
7802 break;
7803 default:
7804 abort ();
7805 }
7806
7807 /* Put a constant second. */
7808 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7809 {
7810 tem = arg0; arg0 = arg1; arg1 = tem;
7811 code = swap_condition (code);
7812 }
7813
7814 /* If this is an equality or inequality test of a single bit, we can
7815 do this by shifting the bit being tested to the low-order bit and
7816 masking the result with the constant 1. If the condition was EQ,
7817 we xor it with 1. This does not require an scc insn and is faster
7818 than an scc insn even if we have it. */
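  /* For instance, `(flags & 8) != 0' becomes `(flags >> 3) & 1', and
     the EQ form `(flags & 8) == 0' gets a final xor with 1.  */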
7819
7820 if ((code == NE || code == EQ)
7821 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7822 && integer_pow2p (TREE_OPERAND (arg0, 1))
7823 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7824 {
7825 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7826 NULL_RTX, VOIDmode, 0)));
7827
7828 if (subtarget == 0 || GET_CODE (subtarget) != REG
7829 || GET_MODE (subtarget) != operand_mode
7830 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7831 subtarget = 0;
7832
7833 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7834
7835 if (bitnum != 0)
7836 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7837 size_int (bitnum), target, 1);
7838
7839 if (GET_MODE (op0) != mode)
7840 op0 = convert_to_mode (mode, op0, 1);
7841
7842 if (bitnum != TYPE_PRECISION (type) - 1)
7843 op0 = expand_and (op0, const1_rtx, target);
7844
7845 if ((code == EQ && ! invert) || (code == NE && invert))
7846 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7847 OPTAB_LIB_WIDEN);
7848
7849 return op0;
7850 }
7851
7852 /* Now see if we are likely to be able to do this. Return if not. */
7853 if (! can_compare_p (operand_mode))
7854 return 0;
7855 icode = setcc_gen_code[(int) code];
7856 if (icode == CODE_FOR_nothing
7857 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7858 {
7859 /* We can only do this if it is one of the special cases that
7860 can be handled without an scc insn. */
7861 if ((code == LT && integer_zerop (arg1))
7862 || (! only_cheap && code == GE && integer_zerop (arg1)))
7863 ;
7864 else if (BRANCH_COST >= 0
7865 && ! only_cheap && (code == NE || code == EQ)
7866 && TREE_CODE (type) != REAL_TYPE
7867 && ((abs_optab->handlers[(int) operand_mode].insn_code
7868 != CODE_FOR_nothing)
7869 || (ffs_optab->handlers[(int) operand_mode].insn_code
7870 != CODE_FOR_nothing)))
7871 ;
7872 else
7873 return 0;
7874 }
7875
7876 preexpand_calls (exp);
7877 if (subtarget == 0 || GET_CODE (subtarget) != REG
7878 || GET_MODE (subtarget) != operand_mode
7879 || ! safe_from_p (subtarget, arg1))
7880 subtarget = 0;
7881
7882 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7883 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7884
7885 if (target == 0)
7886 target = gen_reg_rtx (mode);
7887
7888 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7889 because, if emit_store_flag does anything, it will succeed and
7890 OP0 and OP1 will not be used subsequently. */
7891
7892 result = emit_store_flag (target, code,
7893 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7894 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7895 operand_mode, unsignedp, 1);
7896
7897 if (result)
7898 {
7899 if (invert)
7900 result = expand_binop (mode, xor_optab, result, const1_rtx,
7901 result, 0, OPTAB_LIB_WIDEN);
7902 return result;
7903 }
7904
7905 /* If this failed, we have to do this with set/compare/jump/set code. */
7906 if (target == 0 || GET_CODE (target) != REG
7907 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7908 target = gen_reg_rtx (GET_MODE (target));
7909
7910 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7911 result = compare_from_rtx (op0, op1, code, unsignedp,
7912 operand_mode, NULL_RTX, 0);
7913 if (GET_CODE (result) == CONST_INT)
7914 return (((result == const0_rtx && ! invert)
7915 || (result != const0_rtx && invert))
7916 ? const0_rtx : const1_rtx);
7917
7918 label = gen_label_rtx ();
7919 if (bcc_gen_fctn[(int) code] == 0)
7920 abort ();
7921
7922 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7923 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7924 emit_label (label);
7925
7926 return target;
7927}
7928\f
7929/* Generate a tablejump instruction (used for switch statements). */
7930
7931#ifdef HAVE_tablejump
7932
7933/* INDEX is the value being switched on, with the lowest value
7934 in the table already subtracted.
7935 MODE is its expected mode (needed if INDEX is constant).
7936 RANGE is the length of the jump table.
7937 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7938
7939 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7940 index value is out of range. */
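/* For example, for `switch (c)' with cases 5 through 8, the caller
   passes INDEX = c - 5 and RANGE = 3; the unsigned comparison below
   then catches both c < 5 and c > 8 with a single branch.  */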
7941
7942void
7943do_tablejump (index, mode, range, table_label, default_label)
7944 rtx index, range, table_label, default_label;
7945 enum machine_mode mode;
7946{
7947 register rtx temp, vector;
7948
7949 /* Do an unsigned comparison (in the proper mode) between the index
7950 expression and the value which represents the length of the range.
7951 Since we just finished subtracting the lower bound of the range
7952 from the index expression, this comparison allows us to simultaneously
7953 check that the original index expression value is both greater than
7954 or equal to the minimum value of the range and less than or equal to
7955 the maximum value of the range. */
7956
7957 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7958 emit_jump_insn (gen_bltu (default_label));
7959
7960 /* If index is in range, it must fit in Pmode.
7961 Convert to Pmode so we can index with it. */
7962 if (mode != Pmode)
7963 index = convert_to_mode (Pmode, index, 1);
7964
7965 /* If flag_force_addr were to affect this address
7966 it could interfere with the tricky assumptions made
7967 about addresses that contain label-refs,
7968 which may be valid only very near the tablejump itself. */
7969 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7970 GET_MODE_SIZE, because this indicates how large insns are. The other
7971 uses should all be Pmode, because they are addresses. This code
7972 could fail if addresses and insns are not the same size. */
7973 index = memory_address_noforce
7974 (CASE_VECTOR_MODE,
7975 gen_rtx (PLUS, Pmode,
7976 gen_rtx (MULT, Pmode, index,
7977 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
7978 gen_rtx (LABEL_REF, Pmode, table_label)));
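  /* The address just formed is TABLE_LABEL + INDEX * entry size,
     i.e., a direct index into the dispatch table.  */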
7979 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7980 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7981 RTX_UNCHANGING_P (vector) = 1;
7982 convert_move (temp, vector, 0);
7983
7984 emit_jump_insn (gen_tablejump (temp, table_label));
7985
7986#ifndef CASE_VECTOR_PC_RELATIVE
7987 /* If we are generating PIC code or if the table is PC-relative, the
7988 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7989 if (! flag_pic)
7990 emit_barrier ();
7991#endif
7992}
7993
7994#endif /* HAVE_tablejump */