/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "langhooks.h"
#include "ggc.h"
#include "basic-block.h"
#include "output.h"

static bool prefer_and_bit_test (enum machine_mode, int);
static void do_jump_by_parts_greater (tree, tree, int, rtx, rtx, int);
static void do_jump_by_parts_equality (tree, tree, rtx, rtx, int);
static void do_compare_and_jump (tree, tree, enum rtx_code, enum rtx_code, rtx,
				 rtx, int);

/* Invert probability if there is any.  -1 stands for unknown.  */

static inline int
inv (int prob)
{
  return prob == -1 ? -1 : REG_BR_PROB_BASE - prob;
}
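
/* For example, with the usual REG_BR_PROB_BASE of 10000, a jump taken with
   probability 9000 (90%) inverts to 1000 (10%), and an unknown probability
   stays unknown:

     inv (9000) == 1000
     inv (-1)   == -1  */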
/* Discard any pending stack adjustment.  This avoids relying on the
   RTL optimizers to remove useless adjustments when we know the
   stack pointer value is dead.  */
void
discard_pending_stack_adjust (void)
{
  stack_pointer_delta -= pending_stack_adjust;
  pending_stack_adjust = 0;
}

/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust (void)
{
  if (optimize > 0
      && (! flag_omit_frame_pointer || cfun->calls_alloca)
      && EXIT_IGNORE_STACK)
    discard_pending_stack_adjust ();
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust (void)
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}

/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (tree exp, rtx label, int prob)
{
  do_jump (exp, label, NULL_RTX, inv (prob));
}

void
jumpifnot_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, label, NULL_RTX, inv (prob));
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (tree exp, rtx label, int prob)
{
  do_jump (exp, NULL_RTX, label, prob);
}

void
jumpif_1 (enum tree_code code, tree op0, tree op1, rtx label, int prob)
{
  do_jump_1 (code, op0, op1, NULL_RTX, label, prob);
}

/* Used internally by prefer_and_bit_test.  */

static GTY(()) rtx and_reg;
static GTY(()) rtx and_test;
static GTY(()) rtx shift_test;

/* Compare the relative costs of "(X & (1 << BITNUM))" and "(X >> BITNUM) & 1",
   where X is an arbitrary register of mode MODE.  Return true if the former
   is preferred.  */

static bool
prefer_and_bit_test (enum machine_mode mode, int bitnum)
{
  if (and_test == 0)
    {
      /* Set up rtxes for the two variations.  Use NULL as a placeholder
	 for the BITNUM-based constants.  */
      and_reg = gen_rtx_REG (mode, FIRST_PSEUDO_REGISTER);
      and_test = gen_rtx_AND (mode, and_reg, NULL);
      shift_test = gen_rtx_AND (mode, gen_rtx_ASHIFTRT (mode, and_reg, NULL),
				const1_rtx);
    }
  else
    {
      /* Change the mode of the previously-created rtxes.  */
      PUT_MODE (and_reg, mode);
      PUT_MODE (and_test, mode);
      PUT_MODE (shift_test, mode);
      PUT_MODE (XEXP (shift_test, 0), mode);
    }

  /* Fill in the integers.  */
  XEXP (and_test, 1)
    = immed_double_const ((unsigned HOST_WIDE_INT) 1 << bitnum, 0, mode);
  XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

  return (rtx_cost (and_test, IF_THEN_ELSE, optimize_insn_for_speed_p ())
	  <= rtx_cost (shift_test, IF_THEN_ELSE, optimize_insn_for_speed_p ()));
}

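
/* A deferred adjustment typically comes from a call whose pushed arguments
   have not been popped yet.  For instance, on a target where the stack
   grows downward, after a call that pushed 16 bytes pending_stack_adjust
   is 16, and the adjust_stack call above pops them with a single
   stack-pointer-adjusting insn once deferral is no longer inhibited.  */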
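
/* As a sketch, for MODE == SImode and BITNUM == 3 the two rtxes whose
   costs are compared correspond to

     (and:SI (reg:SI) (const_int 8))			;; X & (1 << 3)
     (and:SI (ashiftrt:SI (reg:SI) (const_int 3))
	     (const_int 1))				;; (X >> 3) & 1

   The cached rtxes above are reused across calls; only the mode and the
   BITNUM-derived constants are rewritten each time.  */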
/* Subroutine of do_jump, dealing with exploded comparisons of the type
   OP0 CODE OP1.  IF_FALSE_LABEL and IF_TRUE_LABEL are used as in do_jump.
   PROB is the probability of the jump to IF_TRUE_LABEL, or -1 if unknown.  */

void
do_jump_1 (enum tree_code code, tree op0, tree op1,
	   rtx if_false_label, rtx if_true_label, int prob)
{
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
	  do_jump (op0, if_true_label, if_false_label, inv (prob));
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (op0, op1, if_false_label, if_true_label,
				     prob);
        else
	  do_compare_and_jump (op0, op1, EQ, EQ, if_false_label, if_true_label,
			       prob);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (op0);

        gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_FLOAT);
	gcc_assert (GET_MODE_CLASS (TYPE_MODE (inner_type))
		    != MODE_COMPLEX_INT);

        if (integer_zerop (op1))
	  do_jump (op0, if_false_label, if_true_label, prob);
        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
           && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (op0, op1, if_true_label, if_false_label,
				     inv (prob));
        else
	  do_compare_and_jump (op0, op1, NE, NE, if_false_label, if_true_label,
			       prob);
        break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 1, if_false_label, if_true_label,
				  prob);
      else
	do_compare_and_jump (op0, op1, LT, LTU, if_false_label, if_true_label,
			     prob);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 0, if_true_label, if_false_label,
				  inv (prob));
      else
	do_compare_and_jump (op0, op1, LE, LEU, if_false_label, if_true_label,
			     prob);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 0, if_false_label, if_true_label,
				  prob);
      else
	do_compare_and_jump (op0, op1, GT, GTU, if_false_label, if_true_label,
			     prob);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (op0));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (op0, op1, 1, if_true_label, if_false_label,
				  inv (prob));
      else
	do_compare_and_jump (op0, op1, GE, GEU, if_false_label, if_true_label,
			     prob);
      break;

    case ORDERED_EXPR:
      do_compare_and_jump (op0, op1, ORDERED, ORDERED,
			   if_false_label, if_true_label, prob);
      break;

    case UNORDERED_EXPR:
      do_compare_and_jump (op0, op1, UNORDERED, UNORDERED,
			   if_false_label, if_true_label, prob);
      break;

    case UNLT_EXPR:
      do_compare_and_jump (op0, op1, UNLT, UNLT, if_false_label, if_true_label,
			   prob);
      break;

    case UNLE_EXPR:
      do_compare_and_jump (op0, op1, UNLE, UNLE, if_false_label, if_true_label,
			   prob);
      break;

    case UNGT_EXPR:
      do_compare_and_jump (op0, op1, UNGT, UNGT, if_false_label, if_true_label,
			   prob);
      break;

    case UNGE_EXPR:
      do_compare_and_jump (op0, op1, UNGE, UNGE, if_false_label, if_true_label,
			   prob);
      break;

    case UNEQ_EXPR:
      do_compare_and_jump (op0, op1, UNEQ, UNEQ, if_false_label, if_true_label,
			   prob);
      break;

    case LTGT_EXPR:
      do_compare_and_jump (op0, op1, LTGT, LTGT, if_false_label, if_true_label,
			   prob);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == NULL_RTX)
        {
	  drop_through_label = gen_label_rtx ();
	  do_jump (op0, drop_through_label, NULL_RTX, prob);
	  do_jump (op1, NULL_RTX, if_true_label, prob);
	}
      else
	{
	  do_jump (op0, if_false_label, NULL_RTX, prob);
	  do_jump (op1, if_false_label, if_true_label, prob);
	}
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == NULL_RTX)
	{
          drop_through_label = gen_label_rtx ();
	  do_jump (op0, NULL_RTX, drop_through_label, prob);
	  do_jump (op1, if_false_label, NULL_RTX, prob);
	}
      else
	{
	  do_jump (op0, NULL_RTX, if_true_label, prob);
	  do_jump (op1, if_false_label, if_true_label, prob);
	}
      break;

    default:
      gcc_unreachable ();
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   PROB is probability of jump to if_true_label, or -1 if unknown.  */

void
do_jump (tree exp, rtx if_false_label, rtx if_true_label, int prob)
{
  enum tree_code code = TREE_CODE (exp);
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;
  rtx drop_through_label = 0;

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->nonzero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label, prob);
      break;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label,
	       inv (prob));
      break;

    case COND_EXPR:
      {
	rtx label1 = gen_label_rtx ();
	if (!if_true_label || !if_false_label)
	  {
	    drop_through_label = gen_label_rtx ();
	    if (!if_true_label)
	      if_true_label = drop_through_label;
	    if (!if_false_label)
	      if_false_label = drop_through_label;
	  }

        do_pending_stack_adjust ();
	do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX, -1);
	do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label, prob);
        emit_label (label1);
	do_jump (TREE_OPERAND (exp, 2), if_false_label, if_true_label, prob);
	break;
      }

    case COMPOUND_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      {
        HOST_WIDE_INT bitsize, bitpos;
        int unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
                             &unsignedp, &volatilep, false);

        type = lang_hooks.types.type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && have_insn_for (COMPARE, TYPE_MODE (type)))
          {
	    do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		     prob);
            break;
          }
        goto normal;
      }

    case MINUS_EXPR:
      /* Nonzero iff operands of minus differ.  */
      code = NE_EXPR;

      /* FALLTHRU */
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    other_code:
      do_jump_1 (code, TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		 if_false_label, if_true_label, prob);
      break;

    case BIT_AND_EXPR:
      /* fold_single_bit_test() converts (X & (1 << C)) into (X >> C) & 1.
	 See if the former is preferred for jump tests and restore it
	 if so.  */
      if (integer_onep (TREE_OPERAND (exp, 1)))
	{
	  tree exp0 = TREE_OPERAND (exp, 0);
	  rtx set_label, clr_label;
	  int setclr_prob = prob;

	  /* Strip narrowing integral type conversions.  */
	  while (CONVERT_EXPR_P (exp0)
		 && TREE_OPERAND (exp0, 0) != error_mark_node
		 && TYPE_PRECISION (TREE_TYPE (exp0))
		    <= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp0, 0))))
	    exp0 = TREE_OPERAND (exp0, 0);

	  /* "exp0 ^ 1" inverts the sense of the single bit test.  */
	  if (TREE_CODE (exp0) == BIT_XOR_EXPR
	      && integer_onep (TREE_OPERAND (exp0, 1)))
	    {
	      exp0 = TREE_OPERAND (exp0, 0);
	      clr_label = if_true_label;
	      set_label = if_false_label;
	      setclr_prob = inv (prob);
	    }
	  else
	    {
	      clr_label = if_false_label;
	      set_label = if_true_label;
	    }

	  if (TREE_CODE (exp0) == RSHIFT_EXPR)
	    {
	      tree arg = TREE_OPERAND (exp0, 0);
	      tree shift = TREE_OPERAND (exp0, 1);
	      tree argtype = TREE_TYPE (arg);
	      if (TREE_CODE (shift) == INTEGER_CST
		  && compare_tree_int (shift, 0) >= 0
		  && compare_tree_int (shift, HOST_BITS_PER_WIDE_INT) < 0
		  && prefer_and_bit_test (TYPE_MODE (argtype),
					  TREE_INT_CST_LOW (shift)))
		{
		  unsigned HOST_WIDE_INT mask
		    = (unsigned HOST_WIDE_INT) 1 << TREE_INT_CST_LOW (shift);
		  do_jump (build2 (BIT_AND_EXPR, argtype, arg,
				   build_int_cst_wide_type (argtype, mask, 0)),
			   clr_label, set_label, setclr_prob);
		  break;
		}
	    }
	}

      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = lang_hooks.types.type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && have_insn_for (COMPARE, TYPE_MODE (type)))
        {
	  do_jump (fold_convert (type, exp), if_false_label, if_true_label,
		   prob);
          break;
        }

      if (TYPE_PRECISION (TREE_TYPE (exp)) > 1
	  || TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	goto normal;

      /* Boolean comparisons can be compiled as TRUTH_AND_EXPR.  */

    case TRUTH_AND_EXPR:
      /* High branch cost, expand as the bitwise AND of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_AND_EXPR into a TRUTH_ANDIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (),
		       false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ANDIF_EXPR;
      goto other_code;

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      /* High branch cost, expand as the bitwise OR of the conditions.
	 Do the same if the RHS has side effects, because we're effectively
	 turning a TRUTH_OR_EXPR into a TRUTH_ORIF_EXPR.  */
      if (BRANCH_COST (optimize_insn_for_speed_p (), false) >= 4
	  || TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	goto normal;
      code = TRUTH_ORIF_EXPR;
      goto other_code;

      /* Fall through and generate the normal code.  */
    default:
    normal:
      temp = expand_normal (exp);
      do_pending_stack_adjust ();
      /* The RTL optimizers prefer comparisons against pseudos.  */
      if (GET_CODE (temp) == SUBREG)
	{
	  /* Compare promoted variables in their promoted mode.  */
	  if (SUBREG_PROMOTED_VAR_P (temp)
	      && REG_P (XEXP (temp, 0)))
	    temp = XEXP (temp, 0);
	  else
	    temp = copy_to_reg (temp);
	}
      do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
			       NE, TYPE_UNSIGNED (TREE_TYPE (exp)),
			       GET_MODE (temp), NULL_RTX,
			       if_false_label, if_true_label, prob);
    }

  if (drop_through_label)
    {
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (enum machine_mode mode, int unsignedp, rtx op0,
			      rtx op1, rtx if_false_label, rtx if_true_label,
			      int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label, prob);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label,
			       inv (prob));
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

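
/* A sketch of the short-circuit cases above: for "if (a && b) goto T;"
   (TRUTH_ANDIF_EXPR with no false label) the expansion is roughly

     jump to drop_through_label if a is zero;
     jump to T if b is nonzero;
   drop_through_label:

   TRUTH_ORIF_EXPR is the mirror image, with the roles of the true and
   false labels exchanged.  */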
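
/* An example of the BIT_AND_EXPR handling in do_jump: fold_single_bit_test
   may have turned "x & 8" into "(x >> 3) & 1"; if prefer_and_bit_test says
   the AND form is cheaper on this target, the test is rebuilt as

     (x >> 3) & 1   ==>   x & 8

   before the conditional jump is expanded, and "((x >> 3) ^ 1) & 1" simply
   swaps the two jump targets instead of emitting the XOR.  */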
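
/* A sketch of the loop above for a two-word (e.g. DImode on a 32-bit
   target) unsigned ">" comparison:

     if (op0.high > op1.high) goto if_true_label;
     if (op0.high != op1.high) goto if_false_label;
     if (op0.low > op1.low) goto if_true_label;
     goto if_false_label;

   Only the highest-order word honors the requested signedness; every
   lower word is compared unsigned.  */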
/* Given the operands TREEOP0 and TREEOP1 of a comparison between values
   too wide to be compared with one insn, test the comparison and jump to
   the appropriate label.  The comparison code is implicit: we always test
   GT if SWAP is 0, and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (tree treeop0, tree treeop1, int swap,
			  rtx if_false_label, rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (swap ? treeop1 : treeop0);
  rtx op1 = expand_normal (swap ? treeop0 : treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (treeop0));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label, prob);
}

/* Jump according to whether OP0 is 0.  We assume that OP0 has an integer
   mode, MODE, that is too wide for the available compare insns.  Either
   (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_zero_rtx (enum machine_mode mode, rtx op0,
			   rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, mode));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, mode),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label, prob);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Test for the equality of two RTX expressions OP0 and OP1 in mode MODE,
   where MODE is an integer mode too wide to be compared with one insn.
   Either (but not both) of IF_TRUE_LABEL and IF_FALSE_LABEL may be NULL_RTX
   to indicate drop through.  */

static void
do_jump_by_parts_equality_rtx (enum machine_mode mode, rtx op0, rtx op1,
			       rtx if_false_label, rtx if_true_label, int prob)
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (op1 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op0, if_false_label, if_true_label,
				 prob);
      return;
    }
  else if (op0 == const0_rtx)
    {
      do_jump_by_parts_zero_rtx (mode, op1, if_false_label, if_true_label,
				 prob);
      return;
    }

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, 0, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX, prob);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

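
/* For instance, a DImode "x == 0" test on a 32-bit target is normally
   expanded by the code above as

     if ((x.low | x.high) == 0) goto if_true_label;

   and falls back to comparing each word against zero in turn only if the
   IOR cannot be expanded.  */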
/* Given the operands TREEOP0 and TREEOP1 of an equality comparison between
   values too wide to be compared with one insn, test the comparison and
   jump to the appropriate label.  */

static void
do_jump_by_parts_equality (tree treeop0, tree treeop1, rtx if_false_label,
			   rtx if_true_label, int prob)
{
  rtx op0 = expand_normal (treeop0);
  rtx op1 = expand_normal (treeop1);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (treeop0));
  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
				 if_true_label, prob);
}

/* Split a comparison into two others, the second of which has the other
   "orderedness".  The first is always ORDERED or UNORDERED if MODE
   does not honor NaNs (which means that it can be skipped in that case;
   see do_compare_rtx_and_jump).

   The two conditions are written in *CODE1 and *CODE2.  Return true if
   the conditions must be ANDed, false if they must be ORed.  */

bool
split_comparison (enum rtx_code code, enum machine_mode mode,
		  enum rtx_code *code1, enum rtx_code *code2)
{
  switch (code)
    {
    case LT:
      *code1 = ORDERED;
      *code2 = UNLT;
      return true;
    case LE:
      *code1 = ORDERED;
      *code2 = UNLE;
      return true;
    case GT:
      *code1 = ORDERED;
      *code2 = UNGT;
      return true;
    case GE:
      *code1 = ORDERED;
      *code2 = UNGE;
      return true;
    case EQ:
      *code1 = ORDERED;
      *code2 = UNEQ;
      return true;
    case NE:
      *code1 = UNORDERED;
      *code2 = LTGT;
      return false;
    case UNLT:
      *code1 = UNORDERED;
      *code2 = LT;
      return false;
    case UNLE:
      *code1 = UNORDERED;
      *code2 = LE;
      return false;
    case UNGT:
      *code1 = UNORDERED;
      *code2 = GT;
      return false;
    case UNGE:
      *code1 = UNORDERED;
      *code2 = GE;
      return false;
    case UNEQ:
      *code1 = UNORDERED;
      *code2 = EQ;
      return false;
    case LTGT:
      /* Do not turn a trapping comparison into a non-trapping one.  */
      if (HONOR_SNANS (mode))
	{
          *code1 = LT;
          *code2 = GT;
          return false;
	}
      else
	{
	  *code1 = ORDERED;
	  *code2 = NE;
	  return true;
	}
    default:
      gcc_unreachable ();
    }
}


/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (rtx op0, rtx op1, enum rtx_code code, int unsignedp,
			 enum machine_mode mode, rtx size, rtx if_false_label,
			 rtx if_true_label, int prob)
{
  rtx tem;
  rtx dummy_label = NULL_RTX;
  rtx last;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Also convert to the reverse comparison if the target can
     implement it.  */
  if ((! if_true_label
       || ! can_compare_p (code, mode, ccp_jump))
      && (! FLOAT_MODE_P (mode)
	  || code == ORDERED || code == UNORDERED
	  || (! HONOR_NANS (mode) && (code == LTGT || code == UNEQ))
	  || (! HONOR_SNANS (mode) && (code == EQ || code == NE))))
    {
      enum rtx_code rcode;
      if (FLOAT_MODE_P (mode))
        rcode = reverse_condition_maybe_unordered (code);
      else
        rcode = reverse_condition (code);

      /* Canonicalize to UNORDERED for the libcall.  */
      if (can_compare_p (rcode, mode, ccp_jump)
	  || (code == ORDERED && ! can_compare_p (ORDERED, mode, ccp_jump)))
	{
          tem = if_true_label;
          if_true_label = if_false_label;
          if_false_label = tem;
	  code = rcode;
	  prob = inv (prob);
	}
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  do_pending_stack_adjust ();

  code = unsignedp ? unsigned_condition (code) : code;
  if (0 != (tem = simplify_relational_operation (code, mode, VOIDmode,
						 op0, op1)))
    {
      if (CONSTANT_P (tem))
	{
	  rtx label = (tem == const0_rtx || tem == CONST0_RTX (mode))
		      ? if_false_label : if_true_label;
	  if (label)
	    emit_jump (label);
	  return;
	}

      code = GET_CODE (tem);
      mode = GET_MODE (tem);
      op0 = XEXP (tem, 0);
      op1 = XEXP (tem, 1);
      unsignedp = (code == GTU || code == LTU || code == GEU || code == LEU);
    }

  if (! if_true_label)
    dummy_label = if_true_label = gen_label_rtx ();

  if (GET_MODE_CLASS (mode) == MODE_INT
      && ! can_compare_p (code, mode, ccp_jump))
    {
      switch (code)
	{
	case LTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_false_label, if_true_label, prob);
	  break;

	case LEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case GTU:
	  do_jump_by_parts_greater_rtx (mode, 1, op0, op1,
					if_false_label, if_true_label, prob);
	  break;

	case GEU:
	  do_jump_by_parts_greater_rtx (mode, 1, op1, op0,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case LT:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_false_label, if_true_label, prob);
	  break;

	case LE:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case GT:
	  do_jump_by_parts_greater_rtx (mode, 0, op0, op1,
					if_false_label, if_true_label, prob);
	  break;

	case GE:
	  do_jump_by_parts_greater_rtx (mode, 0, op1, op0,
					if_true_label, if_false_label,
					inv (prob));
	  break;

	case EQ:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_false_label,
					 if_true_label, prob);
	  break;

	case NE:
	  do_jump_by_parts_equality_rtx (mode, op0, op1, if_true_label,
					 if_false_label, inv (prob));
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      if (GET_MODE_CLASS (mode) == MODE_FLOAT
	  && ! can_compare_p (code, mode, ccp_jump)
	  && can_compare_p (swap_condition (code), mode, ccp_jump))
	{
	  rtx tmp;
	  code = swap_condition (code);
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}

      else if (GET_MODE_CLASS (mode) == MODE_FLOAT
	       && ! can_compare_p (code, mode, ccp_jump)

	       /* Never split ORDERED and UNORDERED.  These must be implemented.  */
	       && (code != ORDERED && code != UNORDERED)

               /* Split a floating-point comparison if we can jump on other
	          conditions...  */
	       && (have_insn_for (COMPARE, mode)

	           /* ... or if there is no libcall for it.  */
	           || code_to_optab[code] == NULL))
        {
	  enum rtx_code first_code;
	  bool and_them = split_comparison (code, mode, &first_code, &code);

	  /* If there are no NaNs, the first comparison should always fall
	     through.  */
	  if (!HONOR_NANS (mode))
	    gcc_assert (first_code == (and_them ? ORDERED : UNORDERED));

	  else
	    {
	      if (and_them)
		{
		  rtx dest_label;
		  /* If we only jump if true, just bypass the second jump.  */
		  if (! if_false_label)
		    {
		      if (! dummy_label)
		        dummy_label = gen_label_rtx ();
		      dest_label = dummy_label;
		    }
		  else
		    dest_label = if_false_label;
                  do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
					   size, dest_label, NULL_RTX, prob);
		}
              else
                do_compare_rtx_and_jump (op0, op1, first_code, unsignedp, mode,
					 size, NULL_RTX, if_true_label, prob);
	    }
	}

      last = get_last_insn ();
      emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			       if_true_label);
      if (prob != -1 && profile_status != PROFILE_ABSENT)
	{
	  for (last = NEXT_INSN (last);
	       last && NEXT_INSN (last);
	       last = NEXT_INSN (last))
	    if (JUMP_P (last))
	      break;
	  if (!last
	      || !JUMP_P (last)
	      || NEXT_INSN (last)
	      || !any_condjump_p (last))
	    {
	      if (dump_file)
		fprintf (dump_file, "Failed to add probability note\n");
	    }
	  else
	    {
	      gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	      add_reg_note (last, REG_BR_PROB, GEN_INT (prob));
	    }
	}
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_label)
    emit_label (dummy_label);
}

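
/* Examples: a floating-point LT splits into ORDERED && UNLT (both parts
   must hold), while UNLE splits into UNORDERED || LE (either part
   suffices); the return value tells the caller which combination to
   emit.  */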
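
/* An example of the reversal at the top of this function: a request to
   jump only to IF_FALSE_LABEL on "op0 EQ op1" (no true label) is rewritten
   as a jump on "op0 NE op1" with the two labels swapped and PROB inverted,
   because the jump insn that is eventually emitted branches when its
   condition is true.  */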
/* Generate code for the comparison of TREEOP0 and TREEOP1 (including code
   to compute the values to be compared) and a conditional jump to
   IF_FALSE_LABEL and/or IF_TRUE_LABEL.  One of the labels can be NULL_RTX,
   in which case the generated code will drop through.
   SIGNED_CODE should be the rtx operation to use for this comparison if the
   data is signed; UNSIGNED_CODE, likewise, if the data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (tree treeop0, tree treeop1, enum rtx_code signed_code,
		     enum rtx_code unsigned_code, rtx if_false_label,
		     rtx if_true_label, int prob)
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_normal (treeop0);
  if (TREE_CODE (treeop0) == ERROR_MARK)
    return;

  op1 = expand_normal (treeop1);
  if (TREE_CODE (treeop1) == ERROR_MARK)
    return;

  type = TREE_TYPE (treeop0);
  mode = TYPE_MODE (type);
  if (TREE_CODE (treeop0) == INTEGER_CST
      && (TREE_CODE (treeop1) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (treeop1))))))
    {
      /* op0 might have been replaced by a promoted constant, in which
         case the type of the second argument should be used.  */
      type = TREE_TYPE (treeop1);
      mode = TYPE_MODE (type);
    }
  unsignedp = TYPE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.
     Only do this if *both* sides of the comparison are function pointers.
     If one side isn't, we want a noncanonicalized comparison.  See PR
     middle-end/17564.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (treeop0)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop0)))
          == FUNCTION_TYPE
      && TREE_CODE (TREE_TYPE (treeop1)) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (TREE_TYPE (treeop1)))
          == FUNCTION_TYPE)
    {
      rtx new_op0 = gen_reg_rtx (mode);
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (treeop0) : NULL_RTX),
			   if_false_label, if_true_label, prob);
}

#include "gt-dojump.h"
