1/* Convert tree expression to rtl instructions, for GNU compiler.
2   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2002110-1301, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26#include "rtl.h"
27#include "tree.h"
28#include "flags.h"
29#include "function.h"
30#include "insn-config.h"
31#include "insn-attr.h"
32/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
33#include "expr.h"
34#include "optabs.h"
35#include "langhooks.h"
36#include "ggc.h"
37
38static bool prefer_and_bit_test (enum machine_mode, int);
39static void do_jump_by_parts_greater (tree, int, rtx, rtx);
40static void do_jump_by_parts_equality (tree, rtx, rtx);
41static void do_compare_and_jump	(tree, enum rtx_code, enum rtx_code, rtx,
42				 rtx);
43
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust (void)
{
  /* pending_stack_adjust is a global owned by the expander; it counts
     bytes of pushed arguments not yet popped.  */
  pending_stack_adjust = 0;
}
52
/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  /* Keep stack_pointer_delta consistent: the adjustment we are
     dropping will never be emitted, so back it out of the running
     delta before clearing it.  */
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}
62
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  /* Only safe when optimizing, when the frame pointer will exist so the
     epilogue can restore the stack from it (either we keep frame
     pointers or alloca forces one), when the target ignores the stack
     pointer at exit (EXIT_IGNORE_STACK), and when this function will
     not be inlined (an inlined copy's caller would see the slack).  */
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline))
    discard_pending_stack_adjust ();
}
78
/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  /* inhibit_defer_pop != 0 means we are inside a region (e.g. between
     a compare and its branch on some targets) where stack pops must
     not be emitted; leave the adjustment pending in that case.  */
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
91
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label)
{
  /* LABEL is the false target; a true result falls through.  */
  do_jump (exp, label, NULL_RTX);
}
103
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label)
{
  /* LABEL is the true target; a false result falls through.  */
  do_jump (exp, NULL_RTX, label);
}
111
/* Used internally by prefer_and_bit_test.  These GC-rooted rtxes are
   built once and then mutated in place on each query, to avoid
   allocating fresh RTL for every cost comparison.  */

/* Scratch pseudo used as the X operand of both test patterns.  */
static GTY(()) rtx and_reg;
/* The "(X & (1 << BITNUM))" pattern.  */
static GTY(()) rtx and_test;
/* The "(X >> BITNUM) & 1" pattern.  */
static GTY(()) rtx shift_test;
117
/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* First call: set up rtxes for the two variations.  Use NULL as a
	 placeholder for the BITNUM-based constants, filled in below.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
				const1_rtx);
    }
  else
    {
      /* Subsequent calls: recycle the cached rtxes, changing only the
	 mode of the previously-created subexpressions.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers: the mask for the AND form, the shift count
     for the shift form.  */
  XEXP (and_test, 1) = GEN_INT ((unsigned HOST_WIDE_INT) 1 << bitnum);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  /* Prefer the AND form on a tie; IF_THEN_ELSE tells rtx_cost the
     expression will be used as a branch condition.  */
  return (rtx_cost (and_test, IF_THEN_ELSE)
	  <= rtx_cost (shift_test, IF_THEN_ELSE));
}
150
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  /* Synthetic label emitted at the end when a missing label had to be
     materialized so a sub-expression could branch to "fall through".  */
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      /* Erroneous trees expand to nothing.  */
      break;

    case INTEGER_CST:
      /* A constant condition statically selects one label; emit an
	 unconditional jump to it (if present) and no comparison.  */
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      /* A NOP conversion around a memory reference must be expanded
	 normally so the reference is actually loaded.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
      /* FALLTHRU */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
      /* FALLTHRU */
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx set_label, clr_label;

	  /* Strip narrowing integral type conversions.  */
	  while ((TREE_CODE (exp0) == NOP_EXPR
		  || TREE_CODE (exp0) == CONVERT_EXPR
		  || TREE_CODE (exp0) == NON_LVALUE_EXPR)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      /* Only rebuild the AND form for an in-range constant shift
		 and when the target's cost model prefers it.  */
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  HOST_WIDE_INT mask = (HOST_WIDE_INT) 1
				       << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cst_type (argtype, mask)),
			   clr_label, set_label);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (fold_convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      /* Logical negation: just swap the two labels.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case COND_EXPR:
      {
	rtx label1 = gen_label_rtx ();
	/* Both arms must branch somewhere; synthesize a drop-through
	   label for whichever target is missing.  */
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

        do_pending_stack_adjust ();
        do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
        do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
        emit_label (label1);
        do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label);
	break;
      }

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

	/* If the field fits a narrower comparable type, compare in that
	   type (mirrors the BIT_AND_EXPR narrowing above).  */
        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
          {
            do_jump (fold_convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	/* Complex comparisons are lowered before expansion.  */
        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

	/* x == 0 is !x: recurse with the labels swapped.  */
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
        break;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      exp = build2 (NE_EXPR, TREE_TYPE (exp),
		    TREE_OPERAND (exp, 0),
		    TREE_OPERAND (exp, 1));
      /* FALLTHRU */
    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

	/* x != 0 is just x as a truth value.  */
        if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
        break;
      }

    case LT_EXPR:
      /* For each ordered comparison: use a word-by-word expansion when
	 the mode is an integer mode the target cannot compare directly,
	 otherwise a single compare-and-branch (LTU etc. is the unsigned
	 variant chosen by do_compare_and_jump).  */
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	/* Try the direct comparison; otherwise emit the reverse one
	   with the labels swapped.  */
        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
          /* If the target doesn't provide either UNORDERED or ORDERED
             comparisons, canonicalize on UNORDERED for the library.  */
          || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode1, tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode1 = UNORDERED_EXPR;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode1 = UNORDERED_EXPR;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;
      case LTGT_EXPR:
	/* It is ok for LTGT_EXPR to trap when the result is unordered,
	   so expand to (a < b) || (a > b).  */
        rcode1 = LTGT;
        tcode1 = LT_EXPR;
        tcode2 = GT_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
	    /* save_expr so each operand is evaluated only once even
	       though it appears in both decomposed comparisons.  */
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into two comparisons.  */
	    if (if_true_label == 0)
	      drop_through_label = if_true_label = gen_label_rtx ();

            cmp0 = fold_build2 (tcode1, TREE_TYPE (exp), op0, op1);
            cmp1 = fold_build2 (tcode2, TREE_TYPE (exp), op0, op1);
	    do_jump (cmp0, 0, if_true_label);
	    do_jump (cmp1, if_false_label, if_true_label);
          }
      }
      break;

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;

      if (if_false_label == NULL_RTX)
        {
	  drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), drop_through_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), NULL_RTX, if_true_label);
	}
      else
	{
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	}
      break;

    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST >= 4 || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;

      if (if_true_label == NULL_RTX)
	{
          drop_through_label = gen_label_rtx ();
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, drop_through_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, NULL_RTX);
	}
      else
	{
          do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
          do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
	}
      break;

      /* Special case:
          __builtin_expect (<test>, 0)	and
          __builtin_expect (<test>, 1)

         We need to do this here, so that <test> is not converted to a SCC
         operation on machines that use condition code registers and COMPARE
         like the PowerPC, and then the jump is done based on whether the SCC
         operation produced a 1 or 0.  */
    case CALL_EXPR:
      /* Check for a built-in function.  */
      {
	tree fndecl = get_callee_fndecl (exp);
	tree arglist = TREE_OPERAND (exp, 1);

	if (fndecl
	    && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	    && arglist != NULL_TREE
	    && TREE_CHAIN (arglist) != NULL_TREE)
	  {
	    rtx seq = expand_builtin_expect_jump (exp, if_false_label,
						  if_true_label);

	    if (seq != NULL_RTX)
	      {
		emit_insn (seq);
		return;
	      }
	  }
      }

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
612
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
			      rtx op1, rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Materialize any missing label so the per-word branches below
     always have a target.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      /* Word 0 is the most significant word on big-endian targets,
	 the least significant on little-endian ones.  */
      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, NULL_RTX, if_false_label);
    }

  /* All words equal: OP0 is not greater than OP1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
663
664/* Given a comparison expression EXP for values too wide to be compared
665   with one insn, test the comparison and jump to the appropriate label.
666   The code of EXP is ignored; we always test GT if SWAP is 0,
667   and LT if SWAP is 1.  */
668
669static void
670do_jump_by_parts_greater (tree exp, int swap, rtx if_false_label,
671			  rtx if_true_label)
672{
673  rtx op0 = expand_normal (TREE_OPERAND (exp, swap));
674  rtx op1 = expand_normal (TREE_OPERAND (exp, !swap));
675  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
676  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
677
678  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
679				if_true_label);
680}
681
/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
			   rtx if_false_label, rtx if_true_label)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  /* If expand_binop succeeded for every word, one compare of the OR
     against zero decides the whole thing.  */
  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any nonzero word sends us to the false label (OP0 != 0).  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
731
/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
			       rtx if_false_label, rtx if_true_label)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Comparison against zero has a cheaper word-OR expansion.  */
  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  /* Any unequal pair of words proves inequality; equal operands fall
     through every compare and reach the true jump below.  */
  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
770
771/* Given an EQ_EXPR expression EXP for values too wide to be compared
772   with one insn, test the comparison and jump to the appropriate label.  */
773
774static void
775do_jump_by_parts_equality (tree exp, rtx if_false_label, rtx if_true_label)
776{
777  rtx op0 = expand_normal (TREE_OPERAND (exp, 0));
778  rtx op1 = expand_normal (TREE_OPERAND (exp, 1));
779  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
780  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
781				 if_true_label);
782}
783
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE,
   and return an rtx encoding the comparison result for the caller to
   use in a conditional-jump or store-flag pattern.  MODE is the machine
   mode of the comparison, not of the result.  The decision as to signed
   or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
		  enum machine_mode mode, rtx size)
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Try to fold the comparison at compile time; the result may be a
     constant (fully decided) or a simplified comparison.  */
  tem = simplify_relational_operation (code, VOIDmode, mode, op0, op1);
  if (tem)
    {
      if (CONSTANT_P (tem))
	return tem;

      if (COMPARISON_P (tem))
	{
	  /* Continue with the simplified comparison's operands, and
	     rederive signedness from the new rtx code.  */
	  code = GET_CODE (tem);
	  op0 = XEXP (tem, 0);
	  op1 = XEXP (tem, 1);
	  mode = GET_MODE (op0);
	  unsignedp = (code == GTU || code == LTU
		       || code == GEU || code == LEU);
	}
    }

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  /* cc0 targets: the result refers to the condition-code register.  */
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
841
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
			 enum machine_mode mode, rtx size, rtx if_false_label,
			 rtx if_true_label)
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  (Reversal is unsafe for floating modes because of NaNs.)  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  /* Try to fold the comparison at compile time.  */
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
						 op0, op1)))
    {
      if (CONSTANT_P (tem))
	{
	  /* The comparison is decided: jump straight to the selected
	     label, if any.  */
	  rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
		      ? if_false_label : if_true_label;
	  if (label)
	    emit_jump (label);
	  return;
	}

      /* Continue with the simplified comparison, rederiving signedness
	 from its rtx code.  */
      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }


  /* The emitters below need a true label; synthesize one that we emit
     just past the false jump.  */
  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  /* If the target cannot branch on this comparison directly, expand it
     word by word; each case maps the condition onto a greater-than or
     equality test with operands and/or labels exchanged.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
	{
	case LTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_false_label, if_true_label);
	  break;

	case LEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_true_label, if_false_label);
	  break;

	case GTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_false_label, if_true_label);
	  break;

	case GEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_true_label, if_false_label);
	  break;

	case LT:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_false_label, if_true_label);
	  break;

	case LE:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_true_label, if_false_label);
	  break;

	case GT:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_false_label, if_true_label);
	  break;

	case GE:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_true_label, if_false_label);
	  break;

	case EQ:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
					 if_true_label);
	  break;

	case NE:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
					 if_false_label);
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			     if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
973
974/* Generate code for a comparison expression EXP (including code to compute
975   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
976   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
977   generated code will drop through.
978   SIGNED_CODE should be the rtx operation for this comparison for
979   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
980
981   We force a stack adjustment unless there are currently
982   things pushed on the stack that aren't yet used.  */
983
984static void
985do_compare_and_jump (tree exp, enum rtx_code signed_code,
986		     enum rtx_code unsigned_code, rtx if_false_label,
987		     rtx if_true_label)
988{
989  rtx op0, op1;
990  tree type;
991  enum machine_mode mode;
992  int unsignedp;
993  enum rtx_code code;
994
995  /* Don't crash if the comparison was erroneous.  */
996  op0 = expand_normal (TREE_OPERAND (exp, 0));
997  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
998    return;
999
1000  op1 = expand_normal (TREE_OPERAND (exp, 1));
1001  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
1002    return;
1003
1004  type = TREE_TYPE (TREE_OPERAND (exp, 0));
1005  mode = TYPE_MODE (type);
1006  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
1007      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
1008          || (GET_MODE_BITSIZE (mode)
1009              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
1010                                                                      1)))))))
1011    {
1012      /* op0 might have been replaced by promoted constant, in which
1013         case the type of second argument should be used.  */
1014      type = TREE_TYPE (TREE_OPERAND (exp, 1));
1015      mode = TYPE_MODE (type);
1016    }
1017  unsignedp = TYPE_UNSIGNED (type);
1018  code = unsignedp ? unsigned_code : signed_code;
1019
1020#ifdef HAVE_canonicalize_funcptr_for_compare
1021  /* If function pointers need to be "canonicalized" before they can
1022     be reliably compared, then canonicalize them.
1023     Only do this if *both* sides of the comparison are function pointers.
1024     If one side isn't, we want a noncanonicalized comparison.  See PR
1025     middle-end/17564.  */
1026  if (HAVE_canonicalize_funcptr_for_compare
1027      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
1028      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
1029          == FUNCTION_TYPE
1030      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
1031      && TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
1032          == FUNCTION_TYPE)
1033    {
1034      rtx new_op0 = gen_reg_rtx (mode);
1035      rtx new_op1 = gen_reg_rtx (mode);
1036
1037      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
1038      op0 = new_op0;
1039
1040      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
1041      op1 = new_op1;
1042    }
1043#endif
1044
1045  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
1046                           ((mode == BLKmode)
1047                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
1048                           if_false_label, if_true_label);
1049}
1050
1051#include "gt-dojump.h"
1052