fold-const.c revision 235623
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

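/* For a concrete picture of these entry points, here is a minimal
   sketch (illustrative only, not code from this file) of how a caller
   might fold a size computation at compile time:

     tree a = size_int (4);
     tree b = size_int (8);
     tree sum = size_binop (PLUS_EXPR, a, b);

   Both operands carry `sizetype', and since both are INTEGER_CSTs,
   size_binop combines them immediately through int_const_binop below,
   so SUM is the constant 12 rather than a PLUS_EXPR tree.  */
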
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

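/* To see the macro in action, assume a 64-bit HOST_WIDE_INT (the
   long long literals below are purely illustrative).  Adding 1 to the
   largest positive value wraps to the most negative one, and the
   sign-bit test catches it:

     long long a = 0x7fffffffffffffffLL;
     long long b = 1;
     long long sum = a + b;              wraps on a two's complement host
     OVERFLOW_SUM_SIGN (a, b, sum)       yields 1

   Here ~(a ^ b) has the sign bit set (the addends agree in sign) and
   (a ^ sum) has it set too (the sum disagrees), so the conjunction is
   negative.  With addends of opposite sign the first factor is
   nonnegative and the macro yields 0, as it should: such a sum can
   never overflow.  */
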
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

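/* A round trip through the representation, assuming a 64-bit
   HOST_WIDE_INT so that BASE is 2^32 (values illustrative):

     HOST_WIDE_INT words[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (words, 0x0000000500000007, 0x0000000200000003);
       now words[] is { 0x7, 0x5, 0x3, 0x2 }
     decode (words, &lo, &hi);
       now lo and hi hold the original two pieces again

   Keeping only half-width digits in each word leaves headroom above
   every digit, which is what lets the digit products and carries in
   mul_double_with_sign below fit in a single HOST_WIDE_INT.  */
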
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value: when >0 we are only interested in signed
   overflow, when <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  OVERFLOWED_CONST
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs,
	or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
	OVERFLOWED_CONST is nonzero,
	or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension.  */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half?  */
      if (high & ((unsigned HOST_WIDE_INT) 1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT) low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half?  */
      if (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT) (-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}

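/* The clearing and extending above are the usual mask-and-extend idiom,
   spelled out for the several word-boundary cases.  A one-word sketch
   (illustrative, assuming 0 < prec < HOST_BITS_PER_WIDE_INT) of the
   same narrowing to PREC bits:

     unsigned HOST_WIDE_INT mask
       = ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
     low &= mask;
     if (sign_extended_type
	 && (low & ((unsigned HOST_WIDE_INT) 1 << (prec - 1))))
       low |= ~mask;

   The function proper must additionally handle values spanning both
   HOST_WIDE_INT halves and precisions on the word boundaries.  */
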
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

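/* The carry into the high half relies on unsigned wraparound: the low
   halves overflowed exactly when their wrapped sum is smaller than an
   addend, which is what (l < l1) tests.  Sketch with plain 64-bit
   values standing in for the halves (illustrative only):

     unsigned long long l1 = 0xffffffffffffffffULL, l2 = 2;
     unsigned long long l = l1 + l2;     wraps to 1
     l < l1                              yields 1, the carry

   The unsigned overflow test works the same way one level up: the
   doubleword sum overflowed when the high half wrapped below h1.  */
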
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

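/* The nested loop above is schoolbook long multiplication in base
   2^(HOST_BITS_PER_WIDE_INT / 2), one half-word digit at a time.  In
   miniature, with base-16 digits (purely illustrative), 0x27 * 0x13:

     digits: a = { 7, 2 }, b = { 3, 1 }
     prod[0]:  7*3           = 21  ->  digit 5, carry 1
     prod[1]:  2*3 + 7*1 + 1 = 14  ->  digit E, carry 0
     prod[2]:  2*1           =  2  ->  digit 2
     result: 0x2E5 = 741 = 39 * 19

   Because every stored digit is only half a word wide, the running sum
   arg1[i] * arg2[j] + prod[k] + carry cannot overflow an unsigned
   HOST_WIDE_INT, as the two comments inside the loop indicate.  */
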
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

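/* Note the double shift (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1)
   in the last branch above.  When COUNT is 0 the bits needed from L1
   would require a shift by the full word width, which C leaves
   undefined, so the shift is split into two always-in-range steps.  A
   64-bit sketch of the hazard (illustrative only):

     unsigned long long x = 1;
     x >> 64;           undefined; many hosts mask the count and yield x
     (x >> 63) >> 1;    well defined, yields 0 as intended

   rshift_double below uses the mirror-image trick for the bits moving
   from the high half into the low half.  */
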
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

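/* Both rotates are built from two logical shifts: rotating an N-bit
   value left by C is (x << C) | (x >> (N - C)) once C is reduced into
   [0, N).  The same composition for a single 32-bit word (a sketch;
   rotl32 is not part of this file):

     static unsigned int
     rotl32 (unsigned int x, int c)
     {
       c &= 31;
       if (c == 0)
	 return x;
       return (x << c) | (x >> (32 - c));
     }

   The doubleword versions pass ARITH == 0 so that both component shifts
   are logical, and rely on rshift_double's count >= prec handling when
   the reduced count is 0.  */
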
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num[num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

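/* The rounding adjustments above, worked for -7 divided by 3 (the trial
   truncating quotient is -2 with remainder -1):

     TRUNC_DIV_EXPR   round toward zero        quotient -2, remainder -1
     FLOOR_DIV_EXPR   round toward -infinity   quotient -3, remainder  2
     CEIL_DIV_EXPR    round toward +infinity   quotient -2, remainder -1
     ROUND_DIV_EXPR   round to nearest         quotient -2 (since -2.33
					       is nearer -2 than -3)

   Whenever a case adjusts the quotient, the code at the end recomputes
   rem = num - (quo * den), so quotient and remainder always satisfy
   num == quo * den + rem on return.  */
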
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* This is non-zero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL_TREE || !EXPR_HAS_LOCATION (stmt))
    locus = input_location;
  else
    locus = EXPR_LOCATION (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL_TREE, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

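/* The machinery above is used bracket-style.  A sketch of a caller that
   folds speculatively and wants the warning only if it keeps the result
   (EXPR and STMT are illustrative placeholders):

     tree tem;

     fold_defer_overflow_warnings ();
     tem = fold (expr);
     if (TREE_CONSTANT (tem))
       fold_undefer_overflow_warnings (true, stmt, 0);
     else
       fold_undefer_and_ignore_overflow_warnings ();

   Passing 0 as CODE means "use the level recorded when the warning was
   deferred", per the comment on fold_undefer_overflow_warnings.  */
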
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
      return true;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

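/* The only value the test above rejects is the type's most negative
   one: in an 8-bit signed type, negating -128 would yield +128, which
   does not fit, while every other value negates safely.  For one word
   the check reduces to (illustrative):

     val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
     val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1))

   i.e. the value is negatable iff its bit pattern is not exactly the
   sign bit.  For wider constants, any nonzero low word already rules
   out the minimum, hence the early return of true.  */
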
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.  If negate_expr_p would return true
   for T, NULL_TREE will never be returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (!TREE_OVERFLOW (tem)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return tem;
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2 (TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return negate_expr (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except for a
   literal, for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

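/* A worked example of the decomposition above, with CODE == PLUS_EXPR
   and IN = (a + b) - 5 where a and b are variables: the outer tree is
   a MINUS_EXPR, the literal 5 is found in the subtracted operand, so
   *MINUS_LITP is set to 5, *CONP stays null, and a + b is returned as
   the variable part.  For IN = x + 3, *LITP is 3 and x is returned.  */
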
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
		      fold_convert (type, t2));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

1696/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1697   constant.  We assume ARG1 and ARG2 have the same data type, or at least
1698   are the same kind of constant and the same machine mode.  Return zero if
1699   combining the constants is not allowed in the current operating mode.
1700
1701   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1702
1703static tree
1704const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1705{
1706  /* Sanity check for the recursive cases.  */
1707  if (!arg1 || !arg2)
1708    return NULL_TREE;
1709
1710  STRIP_NOPS (arg1);
1711  STRIP_NOPS (arg2);
1712
1713  if (TREE_CODE (arg1) == INTEGER_CST)
1714    return int_const_binop (code, arg1, arg2, notrunc);
1715
1716  if (TREE_CODE (arg1) == REAL_CST)
1717    {
1718      enum machine_mode mode;
1719      REAL_VALUE_TYPE d1;
1720      REAL_VALUE_TYPE d2;
1721      REAL_VALUE_TYPE value;
1722      REAL_VALUE_TYPE result;
1723      bool inexact;
1724      tree t, type;
1725
1726      /* The following codes are handled by real_arithmetic.  */
1727      switch (code)
1728	{
1729	case PLUS_EXPR:
1730	case MINUS_EXPR:
1731	case MULT_EXPR:
1732	case RDIV_EXPR:
1733	case MIN_EXPR:
1734	case MAX_EXPR:
1735	  break;
1736
1737	default:
1738	  return NULL_TREE;
1739	}
1740
1741      d1 = TREE_REAL_CST (arg1);
1742      d2 = TREE_REAL_CST (arg2);
1743
1744      type = TREE_TYPE (arg1);
1745      mode = TYPE_MODE (type);
1746
1747      /* Don't perform operation if we honor signaling NaNs and
1748	 either operand is a NaN.  */
1749      if (HONOR_SNANS (mode)
1750	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1751	return NULL_TREE;
1752
1753      /* Don't perform operation if it would raise a division
1754	 by zero exception.  */
1755      if (code == RDIV_EXPR
1756	  && REAL_VALUES_EQUAL (d2, dconst0)
1757	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1758	return NULL_TREE;
1759
1760      /* If either operand is a NaN, just return it; performing the
1761	 arithmetic on it would only propagate the NaN.  */
1762      if (REAL_VALUE_ISNAN (d1))
1763	return arg1;
1764      else if (REAL_VALUE_ISNAN (d2))
1765	return arg2;
1766
1767      inexact = real_arithmetic (&value, code, &d1, &d2);
1768      real_convert (&result, mode, &value);
1769
1770      /* Don't constant fold this floating point operation if
1771	 the result has overflowed and flag_trapping_math is set.  */
1772      if (flag_trapping_math
1773	  && MODE_HAS_INFINITIES (mode)
1774	  && REAL_VALUE_ISINF (result)
1775	  && !REAL_VALUE_ISINF (d1)
1776	  && !REAL_VALUE_ISINF (d2))
1777	return NULL_TREE;
1778
1779      /* Don't constant fold this floating point operation if the
1780	 result may depend upon the run-time rounding mode and
1781	 flag_rounding_math is set, or if GCC's software emulation
1782	 is unable to accurately represent the result.  */
1783      if ((flag_rounding_math
1784	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1785	       && !flag_unsafe_math_optimizations))
1786	  && (inexact || !real_identical (&result, &value)))
1787	return NULL_TREE;
1788
1789      t = build_real (type, result);
1790
1791      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1792      TREE_CONSTANT_OVERFLOW (t)
1793	= TREE_OVERFLOW (t)
1794	  | TREE_CONSTANT_OVERFLOW (arg1)
1795	  | TREE_CONSTANT_OVERFLOW (arg2);
1796      return t;
1797    }
1798
1799  if (TREE_CODE (arg1) == COMPLEX_CST)
1800    {
1801      tree type = TREE_TYPE (arg1);
1802      tree r1 = TREE_REALPART (arg1);
1803      tree i1 = TREE_IMAGPART (arg1);
1804      tree r2 = TREE_REALPART (arg2);
1805      tree i2 = TREE_IMAGPART (arg2);
1806      tree real, imag;
1807
1808      switch (code)
1809	{
1810	case PLUS_EXPR:
1811	case MINUS_EXPR:
1812	  real = const_binop (code, r1, r2, notrunc);
1813	  imag = const_binop (code, i1, i2, notrunc);
1814	  break;
1815
1816	case MULT_EXPR:
1817	  real = const_binop (MINUS_EXPR,
1818			      const_binop (MULT_EXPR, r1, r2, notrunc),
1819			      const_binop (MULT_EXPR, i1, i2, notrunc),
1820			      notrunc);
1821	  imag = const_binop (PLUS_EXPR,
1822			      const_binop (MULT_EXPR, r1, i2, notrunc),
1823			      const_binop (MULT_EXPR, i1, r2, notrunc),
1824			      notrunc);
1825	  break;
1826
1827	case RDIV_EXPR:
1828	  {
1829	    tree magsquared
1830	      = const_binop (PLUS_EXPR,
1831			     const_binop (MULT_EXPR, r2, r2, notrunc),
1832			     const_binop (MULT_EXPR, i2, i2, notrunc),
1833			     notrunc);
1834	    tree t1
1835	      = const_binop (PLUS_EXPR,
1836			     const_binop (MULT_EXPR, r1, r2, notrunc),
1837			     const_binop (MULT_EXPR, i1, i2, notrunc),
1838			     notrunc);
1839	    tree t2
1840	      = const_binop (MINUS_EXPR,
1841			     const_binop (MULT_EXPR, i1, r2, notrunc),
1842			     const_binop (MULT_EXPR, r1, i2, notrunc),
1843			     notrunc);
1844
1845	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1846	      code = TRUNC_DIV_EXPR;
1847
1848	    real = const_binop (code, t1, magsquared, notrunc);
1849	    imag = const_binop (code, t2, magsquared, notrunc);
1850	  }
1851	  break;
1852
1853	default:
1854	  return NULL_TREE;
1855	}
1856
1857      if (real && imag)
1858	return build_complex (type, real, imag);
1859    }
1860
1861  return NULL_TREE;
1862}
1863
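/* Editorial example (sketch, not in the original file): the
   COMPLEX_CST case above folding a product.  For z1 = 3+4i and
   z2 = 1+2i of the same complex type, real = 3*1 - 4*2 = -5 and
   imag = 3*2 + 4*1 = 10, so the result is the COMPLEX_CST -5+10i.
   The helper name is hypothetical.  */

static tree
example_fold_complex_product (tree z1, tree z2)
{
  /* NOTRUNC of 0 lets integer components be truncated to their type.  */
  return const_binop (MULT_EXPR, z1, z2, 0);
}
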
1864/* Create a size type INT_CST node with NUMBER sign extended.  KIND
1865   indicates which particular sizetype to create.  */
1866
1867tree
1868size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1869{
1870  return build_int_cst (sizetype_tab[(int) kind], number);
1871}
1872
1873/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1874   is a tree code.  The type of the result is taken from the operands.
1875   Both must be the same integer type and it must be a size type.
1876   If the operands are constant, so is the result.  */
1877
1878tree
1879size_binop (enum tree_code code, tree arg0, tree arg1)
1880{
1881  tree type = TREE_TYPE (arg0);
1882
1883  if (arg0 == error_mark_node || arg1 == error_mark_node)
1884    return error_mark_node;
1885
1886  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1887	      && type == TREE_TYPE (arg1));
1888
1889  /* Handle the special case of two integer constants faster.  */
1890  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1891    {
1892      /* And some specific cases even faster than that.  */
1893      if (code == PLUS_EXPR && integer_zerop (arg0))
1894	return arg1;
1895      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1896	       && integer_zerop (arg1))
1897	return arg0;
1898      else if (code == MULT_EXPR && integer_onep (arg0))
1899	return arg1;
1900
1901      /* Handle general case of two integer constants.  */
1902      return int_const_binop (code, arg0, arg1, 0);
1903    }
1904
1905  return fold_build2 (code, type, arg0, arg1);
1906}
1907
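/* Editorial sketch (not part of the original source): a typical
   size_binop use, folding a byte offset.  Both operands must share
   one sizetype, per the assertion above; the names are hypothetical.  */

static tree
example_array_byte_offset (tree index, tree elem_size)
{
  /* When INDEX and ELEM_SIZE are both INTEGER_CSTs, the product is
     computed immediately via int_const_binop.  */
  return size_binop (MULT_EXPR, index, elem_size);
}
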
1908/* Given two values, either both of sizetype or both of bitsizetype,
1909   compute the difference between the two values.  Return the value
1910   in the signed type corresponding to the type of the operands.  */
1911
1912tree
1913size_diffop (tree arg0, tree arg1)
1914{
1915  tree type = TREE_TYPE (arg0);
1916  tree ctype;
1917
1918  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1919	      && type == TREE_TYPE (arg1));
1920
1921  /* If the type is already signed, just do the simple thing.  */
1922  if (!TYPE_UNSIGNED (type))
1923    return size_binop (MINUS_EXPR, arg0, arg1);
1924
1925  ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1926
1927  /* If either operand is not a constant, do the conversions to the signed
1928     type and subtract.  The hardware will do the right thing with any
1929     overflow in the subtraction.  */
1930  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1931    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1932		       fold_convert (ctype, arg1));
1933
1934  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1935     Otherwise, subtract the other way, convert to CTYPE (we know that can't
1936     overflow) and negate (which can't either).  Special-case a result
1937     of zero while we're here.  */
1938  if (tree_int_cst_equal (arg0, arg1))
1939    return build_int_cst (ctype, 0);
1940  else if (tree_int_cst_lt (arg1, arg0))
1941    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1942  else
1943    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1944		       fold_convert (ctype, size_binop (MINUS_EXPR,
1945							arg1, arg0)));
1946}
1947
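/* Editorial example (sketch, not in the original file): because
   sizetype is unsigned, size_diffop returns the corresponding signed
   type so that a negative difference is representable.  */

static tree
example_signed_size_difference (void)
{
  /* 4 - 12 would wrap in sizetype; size_diffop instead yields the
     ssizetype constant -8.  */
  return size_diffop (size_int (4), size_int (12));
}
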
1948/* A subroutine of fold_convert_const handling conversions of an
1949   INTEGER_CST to another integer type.  */
1950
1951static tree
1952fold_convert_const_int_from_int (tree type, tree arg1)
1953{
1954  tree t;
1955
1956  /* Given an integer constant, make new constant with new type,
1957     appropriately sign-extended or truncated.  */
1958  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1959			  TREE_INT_CST_HIGH (arg1));
1960
1961  t = force_fit_type (t,
1962		      /* Don't set the overflow when
1963		      	 converting a pointer  */
1964		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
1965		      (TREE_INT_CST_HIGH (arg1) < 0
1966		       && (TYPE_UNSIGNED (type)
1967			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1968		      | TREE_OVERFLOW (arg1),
1969		      TREE_CONSTANT_OVERFLOW (arg1));
1970
1971  return t;
1972}
1973
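/* Editorial sketch (not in the original source): converting the
   INTEGER_CST 300 to an 8-bit unsigned type.  force_fit_type above
   truncates the value to the target precision, giving 44 (300 mod
   256).  The helper name and parameters are hypothetical.  */

static tree
example_truncating_int_convert (tree uint8_type, tree three_hundred)
{
  return fold_convert_const_int_from_int (uint8_type, three_hundred);
}
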
1974/* A subroutine of fold_convert_const handling conversions of a REAL_CST
1975   to an integer type.  */
1976
1977static tree
1978fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1979{
1980  int overflow = 0;
1981  tree t;
1982
1983  /* The following code implements the floating point to integer
1984     conversion rules required by the Java Language Specification,
1985     that IEEE NaNs are mapped to zero and values that overflow
1986     the target precision saturate, i.e. values greater than
1987     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1988     are mapped to INT_MIN.  These semantics are allowed by the
1989     C and C++ standards that simply state that the behavior of
1990     FP-to-integer conversion is unspecified upon overflow.  */
1991
1992  HOST_WIDE_INT high, low;
1993  REAL_VALUE_TYPE r;
1994  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1995
1996  switch (code)
1997    {
1998    case FIX_TRUNC_EXPR:
1999      real_trunc (&r, VOIDmode, &x);
2000      break;
2001
2002    case FIX_CEIL_EXPR:
2003      real_ceil (&r, VOIDmode, &x);
2004      break;
2005
2006    case FIX_FLOOR_EXPR:
2007      real_floor (&r, VOIDmode, &x);
2008      break;
2009
2010    case FIX_ROUND_EXPR:
2011      real_round (&r, VOIDmode, &x);
2012      break;
2013
2014    default:
2015      gcc_unreachable ();
2016    }
2017
2018  /* If R is NaN, return zero and show we have an overflow.  */
2019  if (REAL_VALUE_ISNAN (r))
2020    {
2021      overflow = 1;
2022      high = 0;
2023      low = 0;
2024    }
2025
2026  /* See if R is less than the lower bound or greater than the
2027     upper bound.  */
2028
2029  if (! overflow)
2030    {
2031      tree lt = TYPE_MIN_VALUE (type);
2032      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2033      if (REAL_VALUES_LESS (r, l))
2034	{
2035	  overflow = 1;
2036	  high = TREE_INT_CST_HIGH (lt);
2037	  low = TREE_INT_CST_LOW (lt);
2038	}
2039    }
2040
2041  if (! overflow)
2042    {
2043      tree ut = TYPE_MAX_VALUE (type);
2044      if (ut)
2045	{
2046	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2047	  if (REAL_VALUES_LESS (u, r))
2048	    {
2049	      overflow = 1;
2050	      high = TREE_INT_CST_HIGH (ut);
2051	      low = TREE_INT_CST_LOW (ut);
2052	    }
2053	}
2054    }
2055
2056  if (! overflow)
2057    REAL_VALUE_TO_INT (&low, &high, r);
2058
2059  t = build_int_cst_wide (type, low, high);
2060
2061  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
2062		      TREE_CONSTANT_OVERFLOW (arg1));
2063  return t;
2064}
2065
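/* Editorial example (sketch, not part of the original file): the
   saturating semantics implemented above.  Converting the REAL_CST
   1e30 to a 32-bit signed type folds to TYPE_MAX_VALUE (2147483647)
   with the overflow flag set, and a NaN folds to zero, likewise
   flagged.  The helper name is hypothetical.  */

static tree
example_saturating_fix_trunc (tree int_type, tree real_cst)
{
  return fold_convert_const_int_from_real (FIX_TRUNC_EXPR, int_type,
					   real_cst);
}
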
2066/* A subroutine of fold_convert_const handling conversions of a REAL_CST
2067   to another floating point type.  */
2068
2069static tree
2070fold_convert_const_real_from_real (tree type, tree arg1)
2071{
2072  REAL_VALUE_TYPE value;
2073  tree t;
2074
2075  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2076  t = build_real (type, value);
2077
2078  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2079  TREE_CONSTANT_OVERFLOW (t)
2080    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2081  return t;
2082}
2083
2084/* Attempt to fold type conversion operation CODE of expression ARG1 to
2085   type TYPE.  If no simplification can be done return NULL_TREE.  */
2086
2087static tree
2088fold_convert_const (enum tree_code code, tree type, tree arg1)
2089{
2090  if (TREE_TYPE (arg1) == type)
2091    return arg1;
2092
2093  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2094    {
2095      if (TREE_CODE (arg1) == INTEGER_CST)
2096	return fold_convert_const_int_from_int (type, arg1);
2097      else if (TREE_CODE (arg1) == REAL_CST)
2098	return fold_convert_const_int_from_real (code, type, arg1);
2099    }
2100  else if (TREE_CODE (type) == REAL_TYPE)
2101    {
2102      if (TREE_CODE (arg1) == INTEGER_CST)
2103	return build_real_from_int_cst (type, arg1);
2104      if (TREE_CODE (arg1) == REAL_CST)
2105	return fold_convert_const_real_from_real (type, arg1);
2106    }
2107  return NULL_TREE;
2108}
2109
2110/* Construct a vector of zero elements of vector type TYPE.  */
2111
2112static tree
2113build_zero_vector (tree type)
2114{
2115  tree elem, list;
2116  int i, units;
2117
2118  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2119  units = TYPE_VECTOR_SUBPARTS (type);
2120
2121  list = NULL_TREE;
2122  for (i = 0; i < units; i++)
2123    list = tree_cons (NULL_TREE, elem, list);
2124  return build_vector (type, list);
2125}
2126
2127/* Convert expression ARG to type TYPE.  Used by the middle-end for
2128   simple conversions in preference to calling the front-end's convert.  */
2129
2130tree
2131fold_convert (tree type, tree arg)
2132{
2133  tree orig = TREE_TYPE (arg);
2134  tree tem;
2135
2136  if (type == orig)
2137    return arg;
2138
2139  if (TREE_CODE (arg) == ERROR_MARK
2140      || TREE_CODE (type) == ERROR_MARK
2141      || TREE_CODE (orig) == ERROR_MARK)
2142    return error_mark_node;
2143
2144  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2145      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2146					TYPE_MAIN_VARIANT (orig)))
2147    return fold_build1 (NOP_EXPR, type, arg);
2148
2149  switch (TREE_CODE (type))
2150    {
2151    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2152    case POINTER_TYPE: case REFERENCE_TYPE:
2153    case OFFSET_TYPE:
2154      if (TREE_CODE (arg) == INTEGER_CST)
2155	{
2156	  tem = fold_convert_const (NOP_EXPR, type, arg);
2157	  if (tem != NULL_TREE)
2158	    return tem;
2159	}
2160      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2161	  || TREE_CODE (orig) == OFFSET_TYPE)
2162        return fold_build1 (NOP_EXPR, type, arg);
2163      if (TREE_CODE (orig) == COMPLEX_TYPE)
2164	{
2165	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2166	  return fold_convert (type, tem);
2167	}
2168      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2169		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2170      return fold_build1 (NOP_EXPR, type, arg);
2171
2172    case REAL_TYPE:
2173      if (TREE_CODE (arg) == INTEGER_CST)
2174	{
2175	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2176	  if (tem != NULL_TREE)
2177	    return tem;
2178	}
2179      else if (TREE_CODE (arg) == REAL_CST)
2180	{
2181	  tem = fold_convert_const (NOP_EXPR, type, arg);
2182	  if (tem != NULL_TREE)
2183	    return tem;
2184	}
2185
2186      switch (TREE_CODE (orig))
2187	{
2188	case INTEGER_TYPE:
2189	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2190	case POINTER_TYPE: case REFERENCE_TYPE:
2191	  return fold_build1 (FLOAT_EXPR, type, arg);
2192
2193	case REAL_TYPE:
2194	  return fold_build1 (NOP_EXPR, type, arg);
2195
2196	case COMPLEX_TYPE:
2197	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2198	  return fold_convert (type, tem);
2199
2200	default:
2201	  gcc_unreachable ();
2202	}
2203
2204    case COMPLEX_TYPE:
2205      switch (TREE_CODE (orig))
2206	{
2207	case INTEGER_TYPE:
2208	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2209	case POINTER_TYPE: case REFERENCE_TYPE:
2210	case REAL_TYPE:
2211	  return build2 (COMPLEX_EXPR, type,
2212			 fold_convert (TREE_TYPE (type), arg),
2213			 fold_convert (TREE_TYPE (type), integer_zero_node));
2214	case COMPLEX_TYPE:
2215	  {
2216	    tree rpart, ipart;
2217
2218	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2219	      {
2220		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2221		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2222		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2223	      }
2224
2225	    arg = save_expr (arg);
2226	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2227	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2228	    rpart = fold_convert (TREE_TYPE (type), rpart);
2229	    ipart = fold_convert (TREE_TYPE (type), ipart);
2230	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2231	  }
2232
2233	default:
2234	  gcc_unreachable ();
2235	}
2236
2237    case VECTOR_TYPE:
2238      if (integer_zerop (arg))
2239	return build_zero_vector (type);
2240      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2241      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2242		  || TREE_CODE (orig) == VECTOR_TYPE);
2243      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2244
2245    case VOID_TYPE:
2246      return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2247
2248    default:
2249      gcc_unreachable ();
2250    }
2251}
2252
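/* Editorial sketch (not in the original source): a scalar-to-complex
   conversion through the COMPLEX_TYPE case above.  Converting 2.0 to
   complex double yields COMPLEX_EXPR <2.0, 0.0>, the imaginary part
   being synthesized from integer_zero_node.  Names are hypothetical.  */

static tree
example_scalar_to_complex (tree complex_double_type, tree two)
{
  return fold_convert (complex_double_type, two);
}
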
2253/* Return false if expr can be assumed not to be an lvalue, true
2254   otherwise.  */
2255
2256static bool
2257maybe_lvalue_p (tree x)
2258{
2259  /* We only need to wrap lvalue tree codes.  */
2260  switch (TREE_CODE (x))
2261  {
2262  case VAR_DECL:
2263  case PARM_DECL:
2264  case RESULT_DECL:
2265  case LABEL_DECL:
2266  case FUNCTION_DECL:
2267  case SSA_NAME:
2268
2269  case COMPONENT_REF:
2270  case INDIRECT_REF:
2271  case ALIGN_INDIRECT_REF:
2272  case MISALIGNED_INDIRECT_REF:
2273  case ARRAY_REF:
2274  case ARRAY_RANGE_REF:
2275  case BIT_FIELD_REF:
2276  case OBJ_TYPE_REF:
2277
2278  case REALPART_EXPR:
2279  case IMAGPART_EXPR:
2280  case PREINCREMENT_EXPR:
2281  case PREDECREMENT_EXPR:
2282  case SAVE_EXPR:
2283  case TRY_CATCH_EXPR:
2284  case WITH_CLEANUP_EXPR:
2285  case COMPOUND_EXPR:
2286  case MODIFY_EXPR:
2287  case TARGET_EXPR:
2288  case COND_EXPR:
2289  case BIND_EXPR:
2290  case MIN_EXPR:
2291  case MAX_EXPR:
2292    break;
2293
2294  default:
2295    /* Assume the worst for front-end tree codes.  */
2296    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2297      break;
2298    return false;
2299  }
2300
2301  return true;
2302}
2303
2304/* Return an expr equal to X but certainly not valid as an lvalue.  */
2305
2306tree
2307non_lvalue (tree x)
2308{
2309  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2310     us.  */
2311  if (in_gimple_form)
2312    return x;
2313
2314  if (! maybe_lvalue_p (x))
2315    return x;
2316  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2317}
2318
2319/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2320   Zero means allow extended lvalues.  */
2321
2322int pedantic_lvalues;
2323
2324/* When pedantic, return an expr equal to X but certainly not valid as a
2325   pedantic lvalue.  Otherwise, return X.  */
2326
2327static tree
2328pedantic_non_lvalue (tree x)
2329{
2330  if (pedantic_lvalues)
2331    return non_lvalue (x);
2332  else
2333    return x;
2334}
2335
2336/* Given a tree comparison code, return the code that is the logical inverse
2337   of the given code.  It is not safe to do this for floating-point
2338   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a HONOR_NANS
2339   flag as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2340
2341enum tree_code
2342invert_tree_comparison (enum tree_code code, bool honor_nans)
2343{
2344  if (honor_nans && flag_trapping_math)
2345    return ERROR_MARK;
2346
2347  switch (code)
2348    {
2349    case EQ_EXPR:
2350      return NE_EXPR;
2351    case NE_EXPR:
2352      return EQ_EXPR;
2353    case GT_EXPR:
2354      return honor_nans ? UNLE_EXPR : LE_EXPR;
2355    case GE_EXPR:
2356      return honor_nans ? UNLT_EXPR : LT_EXPR;
2357    case LT_EXPR:
2358      return honor_nans ? UNGE_EXPR : GE_EXPR;
2359    case LE_EXPR:
2360      return honor_nans ? UNGT_EXPR : GT_EXPR;
2361    case LTGT_EXPR:
2362      return UNEQ_EXPR;
2363    case UNEQ_EXPR:
2364      return LTGT_EXPR;
2365    case UNGT_EXPR:
2366      return LE_EXPR;
2367    case UNGE_EXPR:
2368      return LT_EXPR;
2369    case UNLT_EXPR:
2370      return GE_EXPR;
2371    case UNLE_EXPR:
2372      return GT_EXPR;
2373    case ORDERED_EXPR:
2374      return UNORDERED_EXPR;
2375    case UNORDERED_EXPR:
2376      return ORDERED_EXPR;
2377    default:
2378      gcc_unreachable ();
2379    }
2380}
2381
2382/* Similar, but return the comparison that results if the operands are
2383   swapped.  This is safe for floating-point.  */
2384
2385enum tree_code
2386swap_tree_comparison (enum tree_code code)
2387{
2388  switch (code)
2389    {
2390    case EQ_EXPR:
2391    case NE_EXPR:
2392    case ORDERED_EXPR:
2393    case UNORDERED_EXPR:
2394    case LTGT_EXPR:
2395    case UNEQ_EXPR:
2396      return code;
2397    case GT_EXPR:
2398      return LT_EXPR;
2399    case GE_EXPR:
2400      return LE_EXPR;
2401    case LT_EXPR:
2402      return GT_EXPR;
2403    case LE_EXPR:
2404      return GE_EXPR;
2405    case UNGT_EXPR:
2406      return UNLT_EXPR;
2407    case UNGE_EXPR:
2408      return UNLE_EXPR;
2409    case UNLT_EXPR:
2410      return UNGT_EXPR;
2411    case UNLE_EXPR:
2412      return UNGE_EXPR;
2413    default:
2414      gcc_unreachable ();
2415    }
2416}
2417
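/* Editorial example (sketch, not part of the original file): inverting
   x < y with invert_tree_comparison above.  Without NaNs the inverse
   is GE_EXPR; with NaNs honored it must be UNGE_EXPR, since LT_EXPR
   and GE_EXPR are both false on unordered operands.  Under
   -ftrapping-math with NaNs the function refuses and returns
   ERROR_MARK.  */

static enum tree_code
example_invert_less_than (bool honor_nans)
{
  return invert_tree_comparison (LT_EXPR, honor_nans);
}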
2418
2419/* Convert a comparison tree code from an enum tree_code representation
2420   into a compcode bit-based encoding.  This function is the inverse of
2421   compcode_to_comparison.  */
2422
2423static enum comparison_code
2424comparison_to_compcode (enum tree_code code)
2425{
2426  switch (code)
2427    {
2428    case LT_EXPR:
2429      return COMPCODE_LT;
2430    case EQ_EXPR:
2431      return COMPCODE_EQ;
2432    case LE_EXPR:
2433      return COMPCODE_LE;
2434    case GT_EXPR:
2435      return COMPCODE_GT;
2436    case NE_EXPR:
2437      return COMPCODE_NE;
2438    case GE_EXPR:
2439      return COMPCODE_GE;
2440    case ORDERED_EXPR:
2441      return COMPCODE_ORD;
2442    case UNORDERED_EXPR:
2443      return COMPCODE_UNORD;
2444    case UNLT_EXPR:
2445      return COMPCODE_UNLT;
2446    case UNEQ_EXPR:
2447      return COMPCODE_UNEQ;
2448    case UNLE_EXPR:
2449      return COMPCODE_UNLE;
2450    case UNGT_EXPR:
2451      return COMPCODE_UNGT;
2452    case LTGT_EXPR:
2453      return COMPCODE_LTGT;
2454    case UNGE_EXPR:
2455      return COMPCODE_UNGE;
2456    default:
2457      gcc_unreachable ();
2458    }
2459}
2460
2461/* Convert a compcode bit-based encoding of a comparison operator back
2462   to GCC's enum tree_code representation.  This function is the
2463   inverse of comparison_to_compcode.  */
2464
2465static enum tree_code
2466compcode_to_comparison (enum comparison_code code)
2467{
2468  switch (code)
2469    {
2470    case COMPCODE_LT:
2471      return LT_EXPR;
2472    case COMPCODE_EQ:
2473      return EQ_EXPR;
2474    case COMPCODE_LE:
2475      return LE_EXPR;
2476    case COMPCODE_GT:
2477      return GT_EXPR;
2478    case COMPCODE_NE:
2479      return NE_EXPR;
2480    case COMPCODE_GE:
2481      return GE_EXPR;
2482    case COMPCODE_ORD:
2483      return ORDERED_EXPR;
2484    case COMPCODE_UNORD:
2485      return UNORDERED_EXPR;
2486    case COMPCODE_UNLT:
2487      return UNLT_EXPR;
2488    case COMPCODE_UNEQ:
2489      return UNEQ_EXPR;
2490    case COMPCODE_UNLE:
2491      return UNLE_EXPR;
2492    case COMPCODE_UNGT:
2493      return UNGT_EXPR;
2494    case COMPCODE_LTGT:
2495      return LTGT_EXPR;
2496    case COMPCODE_UNGE:
2497      return UNGE_EXPR;
2498    default:
2499      gcc_unreachable ();
2500    }
2501}
2502
2503/* Return a tree for the comparison which is the combination of
2504   doing the AND or OR (depending on CODE) of the two operations LCODE
2505   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2506   the possibility of trapping if the mode has NaNs, and return NULL_TREE
2507   if this makes the transformation invalid.  */
2508
2509tree
2510combine_comparisons (enum tree_code code, enum tree_code lcode,
2511		     enum tree_code rcode, tree truth_type,
2512		     tree ll_arg, tree lr_arg)
2513{
2514  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2515  enum comparison_code lcompcode = comparison_to_compcode (lcode);
2516  enum comparison_code rcompcode = comparison_to_compcode (rcode);
2517  enum comparison_code compcode;
2518
2519  switch (code)
2520    {
2521    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2522      compcode = lcompcode & rcompcode;
2523      break;
2524
2525    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2526      compcode = lcompcode | rcompcode;
2527      break;
2528
2529    default:
2530      return NULL_TREE;
2531    }
2532
2533  if (!honor_nans)
2534    {
2535      /* Eliminate unordered comparisons, as well as LTGT and ORD
2536	 which are not used unless the mode has NaNs.  */
2537      compcode &= ~COMPCODE_UNORD;
2538      if (compcode == COMPCODE_LTGT)
2539	compcode = COMPCODE_NE;
2540      else if (compcode == COMPCODE_ORD)
2541	compcode = COMPCODE_TRUE;
2542    }
2543   else if (flag_trapping_math)
2544     {
2545	/* Check that the original operation and the optimized ones will trap
2546	   under the same condition.  */
2547	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2548		     && (lcompcode != COMPCODE_EQ)
2549		     && (lcompcode != COMPCODE_ORD);
2550	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2551		     && (rcompcode != COMPCODE_EQ)
2552		     && (rcompcode != COMPCODE_ORD);
2553	bool trap = (compcode & COMPCODE_UNORD) == 0
2554		    && (compcode != COMPCODE_EQ)
2555		    && (compcode != COMPCODE_ORD);
2556
2557        /* In a short-circuited boolean expression the LHS might be
2558	   such that the RHS, if evaluated, will never trap.  For
2559	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2560	   if neither x nor y is NaN.  (This is a mixed blessing: for
2561	   example, the expression above will never trap, hence
2562	   optimizing it to x < y would be invalid).  */
2563        if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2564            || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2565          rtrap = false;
2566
2567        /* If the comparison was short-circuited, and only the RHS
2568	   trapped, we may now generate a spurious trap.  */
2569	if (rtrap && !ltrap
2570	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2571	  return NULL_TREE;
2572
2573	/* If we changed the conditions that cause a trap, we lose.  */
2574	if ((ltrap || rtrap) != trap)
2575	  return NULL_TREE;
2576      }
2577
2578  if (compcode == COMPCODE_TRUE)
2579    return constant_boolean_node (true, truth_type);
2580  else if (compcode == COMPCODE_FALSE)
2581    return constant_boolean_node (false, truth_type);
2582  else
2583    return fold_build2 (compcode_to_comparison (compcode),
2584			truth_type, ll_arg, lr_arg);
2585}
2586
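/* Editorial sketch (not in the original source): combining
   (x < y) || (x == y) into x <= y.  The bit encoding makes this a
   plain OR: COMPCODE_LT (1) | COMPCODE_EQ (2) == COMPCODE_LE (3).
   The helper name is hypothetical.  */

static tree
example_combine_lt_or_eq (tree boolean_type, tree x, tree y)
{
  return combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type, x, y);
}
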
2587/* Return nonzero if CODE is a tree code that represents a truth value.  */
2588
2589static int
2590truth_value_p (enum tree_code code)
2591{
2592  return (TREE_CODE_CLASS (code) == tcc_comparison
2593	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2594	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2595	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2596}
2597
2598/* Return nonzero if two operands (typically of the same tree node)
2599   are necessarily equal.  If either argument has side-effects this
2600   function returns zero.  FLAGS modifies behavior as follows:
2601
2602   If OEP_ONLY_CONST is set, only return nonzero for constants.
2603   This function tests whether the operands are indistinguishable;
2604   it does not test whether they are equal using C's == operation.
2605   The distinction is important for IEEE floating point, because
2606   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2607   (2) two NaNs may be indistinguishable, but NaN!=NaN.
2608
2609   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2610   even though it may hold multiple values during a function.
2611   This is because a GCC tree node guarantees that nothing else is
2612   executed between the evaluation of its "operands" (which may often
2613   be evaluated in arbitrary order).  Hence if the operands themselves
2614   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2615   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2616   unset means assuming isochronic (or instantaneous) tree equivalence.
2617   Unless comparing arbitrary expression trees, such as from different
2618   statements, this flag can usually be left unset.
2619
2620   If OEP_PURE_SAME is set, then pure functions with identical arguments
2621   are considered the same.  It is used when the caller has other ways
2622   to ensure that global memory is unchanged in between.  */
2623
2624int
2625operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2626{
2627  /* If either is ERROR_MARK, they aren't equal.  */
2628  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2629    return 0;
2630
2631  /* If both types don't have the same signedness, then we can't consider
2632     them equal.  We must check this before the STRIP_NOPS calls
2633     because they may change the signedness of the arguments.  */
2634  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2635    return 0;
2636
2637  /* If both types don't have the same precision, then it is not safe
2638     to strip NOPs.  */
2639  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2640    return 0;
2641
2642  STRIP_NOPS (arg0);
2643  STRIP_NOPS (arg1);
2644
2645  /* In case both args are comparisons but with different comparison
2646     code, try to swap the comparison operands of one arg to produce
2647     a match and compare that variant.  */
2648  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2649      && COMPARISON_CLASS_P (arg0)
2650      && COMPARISON_CLASS_P (arg1))
2651    {
2652      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2653
2654      if (TREE_CODE (arg0) == swap_code)
2655	return operand_equal_p (TREE_OPERAND (arg0, 0),
2656			        TREE_OPERAND (arg1, 1), flags)
2657	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2658				   TREE_OPERAND (arg1, 0), flags);
2659    }
2660
2661  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2662      /* This is needed for conversions and for COMPONENT_REF.
2663	 Might as well play it safe and always test this.  */
2664      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2665      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2666      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2667    return 0;
2668
2669  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2670     We don't care about side effects in that case because the SAVE_EXPR
2671     takes care of that for us. In all other cases, two expressions are
2672     equal if they have no side effects.  If we have two identical
2673     expressions with side effects that should be treated the same due
2674     to the only side effects being identical SAVE_EXPR's, that will
2675     be detected in the recursive calls below.  */
2676  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2677      && (TREE_CODE (arg0) == SAVE_EXPR
2678	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2679    return 1;
2680
2681  /* Next handle constant cases, those for which we can return 1 even
2682     if ONLY_CONST is set.  */
2683  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2684    switch (TREE_CODE (arg0))
2685      {
2686      case INTEGER_CST:
2687	return (! TREE_CONSTANT_OVERFLOW (arg0)
2688		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2689		&& tree_int_cst_equal (arg0, arg1));
2690
2691      case REAL_CST:
2692	return (! TREE_CONSTANT_OVERFLOW (arg0)
2693		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2694		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2695					  TREE_REAL_CST (arg1)));
2696
2697      case VECTOR_CST:
2698	{
2699	  tree v1, v2;
2700
2701	  if (TREE_CONSTANT_OVERFLOW (arg0)
2702	      || TREE_CONSTANT_OVERFLOW (arg1))
2703	    return 0;
2704
2705	  v1 = TREE_VECTOR_CST_ELTS (arg0);
2706	  v2 = TREE_VECTOR_CST_ELTS (arg1);
2707	  while (v1 && v2)
2708	    {
2709	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2710				    flags))
2711		return 0;
2712	      v1 = TREE_CHAIN (v1);
2713	      v2 = TREE_CHAIN (v2);
2714	    }
2715
2716	  return v1 == v2;
2717	}
2718
2719      case COMPLEX_CST:
2720	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2721				 flags)
2722		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2723				    flags));
2724
2725      case STRING_CST:
2726	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2727		&& ! memcmp (TREE_STRING_POINTER (arg0),
2728			      TREE_STRING_POINTER (arg1),
2729			      TREE_STRING_LENGTH (arg0)));
2730
2731      case ADDR_EXPR:
2732	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2733				0);
2734      default:
2735	break;
2736      }
2737
2738  if (flags & OEP_ONLY_CONST)
2739    return 0;
2740
2741/* Define macros to test an operand from arg0 and arg1 for equality and a
2742   variant that allows null and views null as being different from any
2743   non-null value.  In the latter case, if either is null, then both
2744   must be; otherwise, do the normal comparison.  */
2745#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2746				    TREE_OPERAND (arg1, N), flags)
2747
2748#define OP_SAME_WITH_NULL(N)				\
2749  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
2750   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2751
2752  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2753    {
2754    case tcc_unary:
2755      /* Two conversions are equal only if signedness and modes match.  */
2756      switch (TREE_CODE (arg0))
2757        {
2758        case NOP_EXPR:
2759        case CONVERT_EXPR:
2760        case FIX_CEIL_EXPR:
2761        case FIX_TRUNC_EXPR:
2762        case FIX_FLOOR_EXPR:
2763        case FIX_ROUND_EXPR:
2764	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2765	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2766	    return 0;
2767	  break;
2768	default:
2769	  break;
2770	}
2771
2772      return OP_SAME (0);
2773
2774
2775    case tcc_comparison:
2776    case tcc_binary:
2777      if (OP_SAME (0) && OP_SAME (1))
2778	return 1;
2779
2780      /* For commutative ops, allow the other order.  */
2781      return (commutative_tree_code (TREE_CODE (arg0))
2782	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2783				  TREE_OPERAND (arg1, 1), flags)
2784	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2785				  TREE_OPERAND (arg1, 0), flags));
2786
2787    case tcc_reference:
2788      /* If either of the pointer (or reference) expressions we are
2789	 dereferencing contain a side effect, these cannot be equal.  */
2790      if (TREE_SIDE_EFFECTS (arg0)
2791	  || TREE_SIDE_EFFECTS (arg1))
2792	return 0;
2793
2794      switch (TREE_CODE (arg0))
2795	{
2796	case INDIRECT_REF:
2797	case ALIGN_INDIRECT_REF:
2798	case MISALIGNED_INDIRECT_REF:
2799	case REALPART_EXPR:
2800	case IMAGPART_EXPR:
2801	  return OP_SAME (0);
2802
2803	case ARRAY_REF:
2804	case ARRAY_RANGE_REF:
2805	  /* Operands 2 and 3 may be null.  */
2806	  return (OP_SAME (0)
2807		  && OP_SAME (1)
2808		  && OP_SAME_WITH_NULL (2)
2809		  && OP_SAME_WITH_NULL (3));
2810
2811	case COMPONENT_REF:
2812	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
2813	     may be NULL when we're called to compare MEM_EXPRs.  */
2814	  return OP_SAME_WITH_NULL (0)
2815		 && OP_SAME (1)
2816		 && OP_SAME_WITH_NULL (2);
2817
2818	case BIT_FIELD_REF:
2819	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2820
2821	default:
2822	  return 0;
2823	}
2824
2825    case tcc_expression:
2826      switch (TREE_CODE (arg0))
2827	{
2828	case ADDR_EXPR:
2829	case TRUTH_NOT_EXPR:
2830	  return OP_SAME (0);
2831
2832	case TRUTH_ANDIF_EXPR:
2833	case TRUTH_ORIF_EXPR:
2834	  return OP_SAME (0) && OP_SAME (1);
2835
2836	case TRUTH_AND_EXPR:
2837	case TRUTH_OR_EXPR:
2838	case TRUTH_XOR_EXPR:
2839	  if (OP_SAME (0) && OP_SAME (1))
2840	    return 1;
2841
2842	  /* Otherwise take into account this is a commutative operation.  */
2843	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2844				   TREE_OPERAND (arg1, 1), flags)
2845		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2846				      TREE_OPERAND (arg1, 0), flags));
2847
2848	case CALL_EXPR:
2849	  /* If the CALL_EXPRs call different functions, then they
2850	     clearly cannot be equal.  */
2851	  if (!OP_SAME (0))
2852	    return 0;
2853
2854	  {
2855	    unsigned int cef = call_expr_flags (arg0);
2856	    if (flags & OEP_PURE_SAME)
2857	      cef &= ECF_CONST | ECF_PURE;
2858	    else
2859	      cef &= ECF_CONST;
2860	    if (!cef)
2861	      return 0;
2862	  }
2863
2864	  /* Now see if all the arguments are the same.  operand_equal_p
2865	     does not handle TREE_LIST, so we walk the operands here
2866	     feeding them to operand_equal_p.  */
2867	  arg0 = TREE_OPERAND (arg0, 1);
2868	  arg1 = TREE_OPERAND (arg1, 1);
2869	  while (arg0 && arg1)
2870	    {
2871	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2872				     flags))
2873		return 0;
2874
2875	      arg0 = TREE_CHAIN (arg0);
2876	      arg1 = TREE_CHAIN (arg1);
2877	    }
2878
2879	  /* If we get here and both argument lists are exhausted
2880	     then the CALL_EXPRs are equal.  */
2881	  return ! (arg0 || arg1);
2882
2883	default:
2884	  return 0;
2885	}
2886
2887    case tcc_declaration:
2888      /* Consider __builtin_sqrt equal to sqrt.  */
2889      return (TREE_CODE (arg0) == FUNCTION_DECL
2890	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2891	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2892	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2893
2894    default:
2895      return 0;
2896    }
2897
2898#undef OP_SAME
2899#undef OP_SAME_WITH_NULL
2900}
2901
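/* Editorial example (sketch, not part of the original file): the
   effect of OEP_ONLY_CONST.  For a side-effect-free VAR_DECL a,
   operand_equal_p (a, a, 0) is 1, but with OEP_ONLY_CONST it is 0,
   since only indistinguishable constants qualify under that flag.  */

static int
example_operands_match (tree a, tree b)
{
  /* Flags of 0 give the usual "isochronic" equivalence test.  */
  return operand_equal_p (a, b, 0);
}
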
2902/* Similar to operand_equal_p, but see if ARG0 might have been made by
2903   shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2904
2905   When in doubt, return 0.  */
2906
2907static int
2908operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2909{
2910  int unsignedp1, unsignedpo;
2911  tree primarg0, primarg1, primother;
2912  unsigned int correct_width;
2913
2914  if (operand_equal_p (arg0, arg1, 0))
2915    return 1;
2916
2917  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2918      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2919    return 0;
2920
2921  /* Discard any conversions that don't change the modes of ARG0 and ARG1
2922     and see if the inner values are the same.  This removes any
2923     signedness comparison, which doesn't matter here.  */
2924  primarg0 = arg0, primarg1 = arg1;
2925  STRIP_NOPS (primarg0);
2926  STRIP_NOPS (primarg1);
2927  if (operand_equal_p (primarg0, primarg1, 0))
2928    return 1;
2929
2930  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2931     actual comparison operand, ARG0.
2932
2933     First throw away any conversions to wider types
2934     already present in the operands.  */
2935
2936  primarg1 = get_narrower (arg1, &unsignedp1);
2937  primother = get_narrower (other, &unsignedpo);
2938
2939  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2940  if (unsignedp1 == unsignedpo
2941      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2942      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2943    {
2944      tree type = TREE_TYPE (arg0);
2945
2946      /* Make sure shorter operand is extended the right way
2947	 to match the longer operand.  */
2948      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2949			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2950
2951      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2952	return 1;
2953    }
2954
2955  return 0;
2956}
2957
2958/* See if ARG is an expression that is either a comparison or is performing
2959   arithmetic on comparisons.  The comparisons must only be comparing
2960   two different values, which will be stored in *CVAL1 and *CVAL2; if
2961   they are nonzero it means that some operands have already been found.
2962   No variables may be used anywhere else in the expression except in the
2963   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2964   the expression and save_expr needs to be called with CVAL1 and CVAL2.
2965
2966   If this is true, return 1.  Otherwise, return zero.  */
2967
2968static int
2969twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2970{
2971  enum tree_code code = TREE_CODE (arg);
2972  enum tree_code_class class = TREE_CODE_CLASS (code);
2973
2974  /* We can handle some of the tcc_expression cases here.  */
2975  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2976    class = tcc_unary;
2977  else if (class == tcc_expression
2978	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2979	       || code == COMPOUND_EXPR))
2980    class = tcc_binary;
2981
2982  else if (class == tcc_expression && code == SAVE_EXPR
2983	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2984    {
2985      /* If we've already found a CVAL1 or CVAL2, this expression is
2986	 too complex to handle.  */
2987      if (*cval1 || *cval2)
2988	return 0;
2989
2990      class = tcc_unary;
2991      *save_p = 1;
2992    }
2993
2994  switch (class)
2995    {
2996    case tcc_unary:
2997      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2998
2999    case tcc_binary:
3000      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3001	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
3002				      cval1, cval2, save_p));
3003
3004    case tcc_constant:
3005      return 1;
3006
3007    case tcc_expression:
3008      if (code == COND_EXPR)
3009	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3010				     cval1, cval2, save_p)
3011		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
3012					cval1, cval2, save_p)
3013		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
3014					cval1, cval2, save_p));
3015      return 0;
3016
3017    case tcc_comparison:
3018      /* First see if we can handle the first operand, then the second.  For
3019	 the second operand, we know *CVAL1 can't be zero.  It must be that
3020	 one side of the comparison is each of the values; test for the
3021	 case where this isn't true by failing if the two operands
3022	 are the same.  */
3023
3024      if (operand_equal_p (TREE_OPERAND (arg, 0),
3025			   TREE_OPERAND (arg, 1), 0))
3026	return 0;
3027
3028      if (*cval1 == 0)
3029	*cval1 = TREE_OPERAND (arg, 0);
3030      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3031	;
3032      else if (*cval2 == 0)
3033	*cval2 = TREE_OPERAND (arg, 0);
3034      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3035	;
3036      else
3037	return 0;
3038
3039      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3040	;
3041      else if (*cval2 == 0)
3042	*cval2 = TREE_OPERAND (arg, 1);
3043      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3044	;
3045      else
3046	return 0;
3047
3048      return 1;
3049
3050    default:
3051      return 0;
3052    }
3053}
3054
3055/* ARG is a tree that is known to contain just arithmetic operations and
3056   comparisons.  Evaluate the operations in the tree substituting NEW0 for
3057   any occurrence of OLD0 as an operand of a comparison and likewise for
3058   NEW1 and OLD1.  */
3059
3060static tree
3061eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3062{
3063  tree type = TREE_TYPE (arg);
3064  enum tree_code code = TREE_CODE (arg);
3065  enum tree_code_class class = TREE_CODE_CLASS (code);
3066
3067  /* We can handle some of the tcc_expression cases here.  */
3068  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3069    class = tcc_unary;
3070  else if (class == tcc_expression
3071	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3072    class = tcc_binary;
3073
3074  switch (class)
3075    {
3076    case tcc_unary:
3077      return fold_build1 (code, type,
3078			  eval_subst (TREE_OPERAND (arg, 0),
3079				      old0, new0, old1, new1));
3080
3081    case tcc_binary:
3082      return fold_build2 (code, type,
3083			  eval_subst (TREE_OPERAND (arg, 0),
3084				      old0, new0, old1, new1),
3085			  eval_subst (TREE_OPERAND (arg, 1),
3086				      old0, new0, old1, new1));
3087
3088    case tcc_expression:
3089      switch (code)
3090	{
3091	case SAVE_EXPR:
3092	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3093
3094	case COMPOUND_EXPR:
3095	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3096
3097	case COND_EXPR:
3098	  return fold_build3 (code, type,
3099			      eval_subst (TREE_OPERAND (arg, 0),
3100					  old0, new0, old1, new1),
3101			      eval_subst (TREE_OPERAND (arg, 1),
3102					  old0, new0, old1, new1),
3103			      eval_subst (TREE_OPERAND (arg, 2),
3104					  old0, new0, old1, new1));
3105	default:
3106	  break;
3107	}
3108      /* Fall through - ???  */
3109
3110    case tcc_comparison:
3111      {
3112	tree arg0 = TREE_OPERAND (arg, 0);
3113	tree arg1 = TREE_OPERAND (arg, 1);
3114
3115	/* We need to check both for exact equality and tree equality.  The
3116	   former will be true if the operand has a side-effect.  In that
3117	   case, we know the operand occurred exactly once.  */
3118
3119	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3120	  arg0 = new0;
3121	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3122	  arg0 = new1;
3123
3124	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3125	  arg1 = new0;
3126	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3127	  arg1 = new1;
3128
3129	return fold_build2 (code, type, arg0, arg1);
3130      }
3131
3132    default:
3133      return arg;
3134    }
3135}
3136
3137/* Return a tree for the case when the result of an expression is RESULT
3138   converted to TYPE and OMITTED was previously an operand of the expression
3139   but is now not needed (e.g., we folded OMITTED * 0).
3140
3141   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3142   the conversion of RESULT to TYPE.  */
3143
3144tree
3145omit_one_operand (tree type, tree result, tree omitted)
3146{
3147  tree t = fold_convert (type, result);
3148
3149  if (TREE_SIDE_EFFECTS (omitted))
3150    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3151
3152  return non_lvalue (t);
3153}
3154
3155/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
3156
3157static tree
3158pedantic_omit_one_operand (tree type, tree result, tree omitted)
3159{
3160  tree t = fold_convert (type, result);
3161
3162  if (TREE_SIDE_EFFECTS (omitted))
3163    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3164
3165  return pedantic_non_lvalue (t);
3166}
3167
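/* Editorial sketch (not in the original source): omit_one_operand
   above, as used when folding x * 0 to 0.  If x has side effects it
   must still be evaluated, so the result is COMPOUND_EXPR <x, 0>;
   otherwise the constant is returned wrapped as a non-lvalue.  */

static tree
example_fold_mult_by_zero (tree type, tree x)
{
  return omit_one_operand (type, build_int_cst (type, 0), x);
}
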
3168/* Return a tree for the case when the result of an expression is RESULT
3169   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3170   of the expression but are now not needed.
3171
3172   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3173   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3174   evaluated before OMITTED2.  Otherwise, if neither has side effects,
3175   just do the conversion of RESULT to TYPE.  */
3176
3177tree
3178omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3179{
3180  tree t = fold_convert (type, result);
3181
3182  if (TREE_SIDE_EFFECTS (omitted2))
3183    t = build2 (COMPOUND_EXPR, type, omitted2, t);
3184  if (TREE_SIDE_EFFECTS (omitted1))
3185    t = build2 (COMPOUND_EXPR, type, omitted1, t);
3186
3187  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3188}
3189
3190
3191/* Return a simplified tree node for the truth-negation of ARG.  This
3192   never alters ARG itself.  We assume that ARG is an operation that
3193   returns a truth value (0 or 1).
3194
3195   FIXME: one would think we would fold the result, but it causes
3196   problems with the dominator optimizer.  */
3197
3198tree
3199fold_truth_not_expr (tree arg)
3200{
3201  tree type = TREE_TYPE (arg);
3202  enum tree_code code = TREE_CODE (arg);
3203
3204  /* If this is a comparison, we can simply invert it, except for
3205     floating-point non-equality comparisons, in which case we just
3206     enclose a TRUTH_NOT_EXPR around what we have.  */
3207
3208  if (TREE_CODE_CLASS (code) == tcc_comparison)
3209    {
3210      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3211      if (FLOAT_TYPE_P (op_type)
3212	  && flag_trapping_math
3213	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3214	  && code != NE_EXPR && code != EQ_EXPR)
3215	return NULL_TREE;
3216      else
3217	{
3218	  code = invert_tree_comparison (code,
3219					 HONOR_NANS (TYPE_MODE (op_type)));
3220	  if (code == ERROR_MARK)
3221	    return NULL_TREE;
3222	  else
3223	    return build2 (code, type,
3224			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3225	}
3226    }
3227
3228  switch (code)
3229    {
3230    case INTEGER_CST:
3231      return constant_boolean_node (integer_zerop (arg), type);
3232
3233    case TRUTH_AND_EXPR:
3234      return build2 (TRUTH_OR_EXPR, type,
3235		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3236		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3237
3238    case TRUTH_OR_EXPR:
3239      return build2 (TRUTH_AND_EXPR, type,
3240		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3241		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3242
3243    case TRUTH_XOR_EXPR:
3244      /* Here we can invert either operand.  We invert the first operand
3245	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3246	 result is the XOR of the first operand with the inside of the
3247	 negation of the second operand.  */
3248
3249      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3250	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3251		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3252      else
3253	return build2 (TRUTH_XOR_EXPR, type,
3254		       invert_truthvalue (TREE_OPERAND (arg, 0)),
3255		       TREE_OPERAND (arg, 1));
3256
3257    case TRUTH_ANDIF_EXPR:
3258      return build2 (TRUTH_ORIF_EXPR, type,
3259		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3260		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3261
3262    case TRUTH_ORIF_EXPR:
3263      return build2 (TRUTH_ANDIF_EXPR, type,
3264		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3265		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3266
3267    case TRUTH_NOT_EXPR:
3268      return TREE_OPERAND (arg, 0);
3269
3270    case COND_EXPR:
3271      {
3272	tree arg1 = TREE_OPERAND (arg, 1);
3273	tree arg2 = TREE_OPERAND (arg, 2);
3274	/* A COND_EXPR may have a throw as one operand, which
3275	   then has void type.  Just leave void operands
3276	   as they are.  */
3277	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3278		       VOID_TYPE_P (TREE_TYPE (arg1))
3279		       ? arg1 : invert_truthvalue (arg1),
3280		       VOID_TYPE_P (TREE_TYPE (arg2))
3281		       ? arg2 : invert_truthvalue (arg2));
3282      }
3283
3284    case COMPOUND_EXPR:
3285      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3286		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3287
3288    case NON_LVALUE_EXPR:
3289      return invert_truthvalue (TREE_OPERAND (arg, 0));
3290
3291    case NOP_EXPR:
3292      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3293	return build1 (TRUTH_NOT_EXPR, type, arg);
3294
3295
      /* ... fall through ...  */

3296    case FLOAT_EXPR:
3297      return build1 (TREE_CODE (arg), type,
3298		     invert_truthvalue (TREE_OPERAND (arg, 0)));
3299
3300    case BIT_AND_EXPR:
3301      if (!integer_onep (TREE_OPERAND (arg, 1)))
3302	break;
3303      return build2 (EQ_EXPR, type, arg,
3304		     build_int_cst (type, 0));
3305
3306    case SAVE_EXPR:
3307      return build1 (TRUTH_NOT_EXPR, type, arg);
3308
3309    case CLEANUP_POINT_EXPR:
3310      return build1 (CLEANUP_POINT_EXPR, type,
3311		     invert_truthvalue (TREE_OPERAND (arg, 0)));
3312
3313    default:
3314      break;
3315    }
3316
3317  return NULL_TREE;
3318}
3319
3320/* Return a simplified tree node for the truth-negation of ARG.  This
3321   never alters ARG itself.  We assume that ARG is an operation that
3322   returns a truth value (0 or 1).
3323
3324   FIXME: one would think we would fold the result, but it causes
3325   problems with the dominator optimizer.  */
3326
3327tree
3328invert_truthvalue (tree arg)
3329{
3330  tree tem;
3331
3332  if (TREE_CODE (arg) == ERROR_MARK)
3333    return arg;
3334
3335  tem = fold_truth_not_expr (arg);
3336  if (!tem)
3337    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3338
3339  return tem;
3340}
3341
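/* Editorial example (sketch, not part of the original file): truth
   negation of (a && b).  fold_truth_not_expr applies De Morgan's law,
   giving (!a || !b); where nothing applies, such as a trapping
   floating-point "<" under -ftrapping-math, invert_truthvalue falls
   back to wrapping the argument in TRUTH_NOT_EXPR.  */

static tree
example_negate_conjunction (tree a_and_b)
{
  return invert_truthvalue (a_and_b);
}
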
3342/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3343   operands are another bit-wise operation with a common input.  If so,
3344   distribute the bit operations to save an operation and possibly two if
3345   constants are involved.  For example, convert
3346	(A | B) & (A | C) into A | (B & C)
3347   Further simplification will occur if B and C are constants.
3348
3349   If this optimization cannot be done, 0 will be returned.  */
3350
3351static tree
3352distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3353{
3354  tree common;
3355  tree left, right;
3356
3357  if (TREE_CODE (arg0) != TREE_CODE (arg1)
3358      || TREE_CODE (arg0) == code
3359      || (TREE_CODE (arg0) != BIT_AND_EXPR
3360	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
3361    return 0;
3362
3363  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3364    {
3365      common = TREE_OPERAND (arg0, 0);
3366      left = TREE_OPERAND (arg0, 1);
3367      right = TREE_OPERAND (arg1, 1);
3368    }
3369  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3370    {
3371      common = TREE_OPERAND (arg0, 0);
3372      left = TREE_OPERAND (arg0, 1);
3373      right = TREE_OPERAND (arg1, 0);
3374    }
3375  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3376    {
3377      common = TREE_OPERAND (arg0, 1);
3378      left = TREE_OPERAND (arg0, 0);
3379      right = TREE_OPERAND (arg1, 1);
3380    }
3381  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3382    {
3383      common = TREE_OPERAND (arg0, 1);
3384      left = TREE_OPERAND (arg0, 0);
3385      right = TREE_OPERAND (arg1, 0);
3386    }
3387  else
3388    return 0;
3389
3390  return fold_build2 (TREE_CODE (arg0), type, common,
3391		      fold_build2 (code, type, left, right));
3392}
3393
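/* Editorial sketch (not in the original source): the distribution
   above on constants.  For (x | 0xf0) & (x | 0x0f) the common operand
   is x, so the result is x | (0xf0 & 0x0f), which folds further to
   x | 0 and finally to x.  */

static tree
example_distribute_and_over_or (tree type, tree ior0, tree ior1)
{
  /* IOR0 and IOR1 are the two BIT_IOR_EXPRs; returns 0 when no
     common operand is found.  */
  return distribute_bit_expr (BIT_AND_EXPR, type, ior0, ior1);
}
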
3394/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3395   with code CODE.  This optimization is unsafe.  */
3396static tree
3397distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3398{
3399  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3400  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3401
3402  /* (A / C) +- (B / C) -> (A +- B) / C.  */
3403  if (mul0 == mul1
3404      && operand_equal_p (TREE_OPERAND (arg0, 1),
3405		       TREE_OPERAND (arg1, 1), 0))
3406    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3407			fold_build2 (code, type,
3408				     TREE_OPERAND (arg0, 0),
3409				     TREE_OPERAND (arg1, 0)),
3410			TREE_OPERAND (arg0, 1));
3411
3412  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3413  if (operand_equal_p (TREE_OPERAND (arg0, 0),
3414		       TREE_OPERAND (arg1, 0), 0)
3415      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3416      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3417    {
3418      REAL_VALUE_TYPE r0, r1;
3419      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3420      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3421      if (!mul0)
3422	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3423      if (!mul1)
3424        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3425      real_arithmetic (&r0, code, &r0, &r1);
3426      return fold_build2 (MULT_EXPR, type,
3427			  TREE_OPERAND (arg0, 0),
3428			  build_real (type, r0));
3429    }
3430
3431  return NULL_TREE;
3432}
3433
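/* Editorial example (sketch, not part of the original file): why the
   rewrite above is unsafe.  Turning a/c + b/c into (a + b)/c changes
   rounding and overflow behavior: with a = b = DBL_MAX and c = 2.0
   the original sum is DBL_MAX but the rewrite gives +Inf.  Callers
   therefore guard it with flag_unsafe_math_optimizations.  */

static tree
example_distribute_rdiv (tree type, tree a_div_c, tree b_div_c)
{
  /* Both operands are RDIV_EXPRs; with equal denominators this
     produces (a + b) / c.  */
  return distribute_real_division (PLUS_EXPR, type, a_div_c, b_div_c);
}
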
3434/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3435   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
3436
3437static tree
3438make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3439		    int unsignedp)
3440{
3441  tree result;
3442
3443  if (bitpos == 0)
3444    {
3445      tree size = TYPE_SIZE (TREE_TYPE (inner));
3446      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3447	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3448	  && host_integerp (size, 0)
3449	  && tree_low_cst (size, 0) == bitsize)
3450	return fold_convert (type, inner);
3451    }
3452
3453  result = build3 (BIT_FIELD_REF, type, inner,
3454		   size_int (bitsize), bitsize_int (bitpos));
3455
3456  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3457
3458  return result;
3459}
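
/* For illustration only: asking for 8 bits at bit position 16 of a
   32-bit integer INNER yields roughly BIT_FIELD_REF <inner, 8, 16>,
   whereas asking for all 32 bits at position 0 is just a conversion
   of INNER to TYPE, which the early return above produces.  */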
3460
3461/* Optimize a bit-field compare.
3462
3463   There are two cases:  First is a compare against a constant and the
3464   second is a comparison of two items where the fields are at the same
3465   bit position relative to the start of a chunk (byte, halfword, word)
3466   large enough to contain it.  In these cases we can avoid the shift
3467   implicit in bitfield extractions.
3468
3469   For constants, we emit a compare of the shifted constant with the
3470   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3471   compared.  For two fields at the same position, we do the ANDs with the
3472   similar mask and compare the result of the ANDs.
3473
3474   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3475   COMPARE_TYPE is the type of the comparison, and LHS and RHS
3476   are the left and right operands of the comparison, respectively.
3477
3478   If the optimization described above can be done, we return the resulting
3479   tree.  Otherwise we return zero.  */
3480
3481static tree
3482optimize_bit_field_compare (enum tree_code code, tree compare_type,
3483			    tree lhs, tree rhs)
3484{
3485  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3486  tree type = TREE_TYPE (lhs);
3487  tree signed_type, unsigned_type;
3488  int const_p = TREE_CODE (rhs) == INTEGER_CST;
3489  enum machine_mode lmode, rmode, nmode;
3490  int lunsignedp, runsignedp;
3491  int lvolatilep = 0, rvolatilep = 0;
3492  tree linner, rinner = NULL_TREE;
3493  tree mask;
3494  tree offset;
3495
3496  /* Get all the information about the extractions being done.  If the bit size
3497     is the same as the size of the underlying object, we aren't doing an
3498     extraction at all and so can do nothing.  We also don't want to
3499     do anything if the inner expression is a PLACEHOLDER_EXPR since we
3500     then will no longer be able to replace it.  */
3501  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3502				&lunsignedp, &lvolatilep, false);
3503  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3504      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3505    return 0;
3506
3507 if (!const_p)
3508   {
3509     /* If this is not a constant, we can only do something if bit positions,
3510	sizes, and signedness are the same.  */
3511     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3512				   &runsignedp, &rvolatilep, false);
3513
3514     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3515	 || lunsignedp != runsignedp || offset != 0
3516	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3517       return 0;
3518   }
3519
3520  /* See if we can find a mode to refer to this field.  We should be able to,
3521     but fail if we can't.  */
3522  nmode = get_best_mode (lbitsize, lbitpos,
3523			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3524			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3525				TYPE_ALIGN (TREE_TYPE (rinner))),
3526			 word_mode, lvolatilep || rvolatilep);
3527  if (nmode == VOIDmode)
3528    return 0;
3529
3530  /* Set signed and unsigned types of the precision of this mode for the
3531     shifts below.  */
3532  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3533  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3534
3535  /* Compute the bit position and size for the new reference and our offset
3536     within it. If the new reference is the same size as the original, we
3537     won't optimize anything, so return zero.  */
3538  nbitsize = GET_MODE_BITSIZE (nmode);
3539  nbitpos = lbitpos & ~ (nbitsize - 1);
3540  lbitpos -= nbitpos;
3541  if (nbitsize == lbitsize)
3542    return 0;
3543
3544  if (BYTES_BIG_ENDIAN)
3545    lbitpos = nbitsize - lbitsize - lbitpos;
3546
3547  /* Make the mask to be used against the extracted field.  */
3548  mask = build_int_cst (unsigned_type, -1);
3549  mask = force_fit_type (mask, 0, false, false);
3550  mask = fold_convert (unsigned_type, mask);
3551  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3552  mask = const_binop (RSHIFT_EXPR, mask,
3553		      size_int (nbitsize - lbitsize - lbitpos), 0);
3554
3555  if (! const_p)
3556    /* If not comparing with constant, just rework the comparison
3557       and return.  */
3558    return build2 (code, compare_type,
3559		   build2 (BIT_AND_EXPR, unsigned_type,
3560			   make_bit_field_ref (linner, unsigned_type,
3561					       nbitsize, nbitpos, 1),
3562			   mask),
3563		   build2 (BIT_AND_EXPR, unsigned_type,
3564			   make_bit_field_ref (rinner, unsigned_type,
3565					       nbitsize, nbitpos, 1),
3566			   mask));
3567
3568  /* Otherwise, we are handling the constant case. See if the constant is too
3569     big for the field.  Warn and return a tree for 0 (false) if so.  We do
3570     this not only for its own sake, but to avoid having to test for this
3571     error case below.  If we didn't, we might generate wrong code.
3572
3573     For unsigned fields, the constant shifted right by the field length should
3574     be all zero.  For signed fields, the high-order bits should agree with
3575     the sign bit.  */
3576
3577  if (lunsignedp)
3578    {
3579      if (! integer_zerop (const_binop (RSHIFT_EXPR,
3580					fold_convert (unsigned_type, rhs),
3581					size_int (lbitsize), 0)))
3582	{
3583	  warning (0, "comparison is always %d due to width of bit-field",
3584		   code == NE_EXPR);
3585	  return constant_boolean_node (code == NE_EXPR, compare_type);
3586	}
3587    }
3588  else
3589    {
3590      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3591			      size_int (lbitsize - 1), 0);
3592      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3593	{
3594	  warning (0, "comparison is always %d due to width of bit-field",
3595		   code == NE_EXPR);
3596	  return constant_boolean_node (code == NE_EXPR, compare_type);
3597	}
3598    }
3599
3600  /* Single-bit compares should always be against zero.  */
3601  if (lbitsize == 1 && ! integer_zerop (rhs))
3602    {
3603      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3604      rhs = build_int_cst (type, 0);
3605    }
3606
3607  /* Make a new bitfield reference, shift the constant over the
3608     appropriate number of bits and mask it with the computed mask
3609     (in case this was a signed field).  If we changed it, make a new one.  */
3610  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3611  if (lvolatilep)
3612    {
3613      TREE_SIDE_EFFECTS (lhs) = 1;
3614      TREE_THIS_VOLATILE (lhs) = 1;
3615    }
3616
3617  rhs = const_binop (BIT_AND_EXPR,
3618		     const_binop (LSHIFT_EXPR,
3619				  fold_convert (unsigned_type, rhs),
3620				  size_int (lbitpos), 0),
3621		     mask, 0);
3622
3623  return build2 (code, compare_type,
3624		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3625		 rhs);
3626}
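
/* For illustration only (a hypothetical example): given

     struct s { unsigned a : 3; unsigned b : 5; };
     int f (struct s *p) { return p->b == 7; }

   the comparison can become, on a typical little-endian target where
   both fields share one byte, roughly (byte & 0xf8) == (7 << 3),
   i.e. == 0x38: one load, one AND and one compare, with no shift to
   extract the field.  The actual mask and shift depend on the mode
   chosen by get_best_mode and on BYTES_BIG_ENDIAN.  */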
3627
3628/* Subroutine for fold_truthop: decode a field reference.
3629
3630   If EXP is a comparison reference, we return the innermost reference.
3631
3632   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3633   set to the starting bit number.
3634
3635   If the innermost field can be completely contained in a mode-sized
3636   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
3637
3638   *PVOLATILEP is set to 1 if any expression encountered is volatile;
3639   otherwise it is not changed.
3640
3641   *PUNSIGNEDP is set to the signedness of the field.
3642
3643   *PMASK is set to the mask used.  This is either contained in a
3644   BIT_AND_EXPR or derived from the width of the field.
3645
3646   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3647
3648   Return 0 if this is not a component reference or is one that we can't
3649   do anything with.  */
3650
3651static tree
3652decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3653			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3654			int *punsignedp, int *pvolatilep,
3655			tree *pmask, tree *pand_mask)
3656{
3657  tree outer_type = 0;
3658  tree and_mask = 0;
3659  tree mask, inner, offset;
3660  tree unsigned_type;
3661  unsigned int precision;
3662
3663  /* All the optimizations using this function assume integer fields.
3664     There are problems with FP fields since the type_for_size call
3665     below can fail for, e.g., XFmode.  */
3666  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3667    return 0;
3668
3669  /* We are interested in the bare arrangement of bits, so strip everything
3670     that doesn't affect the machine mode.  However, record the type of the
3671     outermost expression if it may matter below.  */
3672  if (TREE_CODE (exp) == NOP_EXPR
3673      || TREE_CODE (exp) == CONVERT_EXPR
3674      || TREE_CODE (exp) == NON_LVALUE_EXPR)
3675    outer_type = TREE_TYPE (exp);
3676  STRIP_NOPS (exp);
3677
3678  if (TREE_CODE (exp) == BIT_AND_EXPR)
3679    {
3680      and_mask = TREE_OPERAND (exp, 1);
3681      exp = TREE_OPERAND (exp, 0);
3682      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3683      if (TREE_CODE (and_mask) != INTEGER_CST)
3684	return 0;
3685    }
3686
3687  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3688			       punsignedp, pvolatilep, false);
3689  if ((inner == exp && and_mask == 0)
3690      || *pbitsize < 0 || offset != 0
3691      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3692    return 0;
3693
3694  /* If the number of bits in the reference is the same as the bitsize of
3695     the outer type, then the outer type gives the signedness. Otherwise
3696     (in case of a small bitfield) the signedness is unchanged.  */
3697  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3698    *punsignedp = TYPE_UNSIGNED (outer_type);
3699
3700  /* Compute the mask to access the bitfield.  */
3701  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3702  precision = TYPE_PRECISION (unsigned_type);
3703
3704  mask = build_int_cst (unsigned_type, -1);
3705  mask = force_fit_type (mask, 0, false, false);
3706
3707  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3708  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3709
3710  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3711  if (and_mask != 0)
3712    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3713			fold_convert (unsigned_type, and_mask), mask);
3714
3715  *pmask = mask;
3716  *pand_mask = and_mask;
3717  return inner;
3718}
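
/* For illustration only: for a reference to a 5-bit bit-field at bit
   position 3, say p->b with struct s { unsigned a : 3; unsigned b : 5; },
   this returns the underlying object with *PBITSIZE == 5 and
   *PBITPOS == 3, and sets *PMASK to the low-order mask 0x1f in the
   unsigned type chosen for the field.  Had the expression been
   p->b & 3, *PAND_MASK would be 3 and *PMASK would be 3 & 0x1f,
   i.e. 3.  */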
3719
3720/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3721   bit positions.  */
3722
3723static int
3724all_ones_mask_p (tree mask, int size)
3725{
3726  tree type = TREE_TYPE (mask);
3727  unsigned int precision = TYPE_PRECISION (type);
3728  tree tmask;
3729
3730  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3731  tmask = force_fit_type (tmask, 0, false, false);
3732
3733  return
3734    tree_int_cst_equal (mask,
3735			const_binop (RSHIFT_EXPR,
3736				     const_binop (LSHIFT_EXPR, tmask,
3737						  size_int (precision - size),
3738						  0),
3739				     size_int (precision - size), 0));
3740}
3741
3742/* Subroutine for fold: determine if VAL is the INTEGER_CST that
3743   represents the sign bit of EXP's type.  If EXP represents a sign
3744   or zero extension, also test VAL against the unextended type.
3745   The return value is the (sub)expression whose sign bit is VAL,
3746   or NULL_TREE otherwise.  */
3747
3748static tree
3749sign_bit_p (tree exp, tree val)
3750{
3751  unsigned HOST_WIDE_INT mask_lo, lo;
3752  HOST_WIDE_INT mask_hi, hi;
3753  int width;
3754  tree t;
3755
3756  /* Tree EXP must have an integral type.  */
3757  t = TREE_TYPE (exp);
3758  if (! INTEGRAL_TYPE_P (t))
3759    return NULL_TREE;
3760
3761  /* Tree VAL must be an integer constant.  */
3762  if (TREE_CODE (val) != INTEGER_CST
3763      || TREE_CONSTANT_OVERFLOW (val))
3764    return NULL_TREE;
3765
3766  width = TYPE_PRECISION (t);
3767  if (width > HOST_BITS_PER_WIDE_INT)
3768    {
3769      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3770      lo = 0;
3771
3772      mask_hi = ((unsigned HOST_WIDE_INT) -1
3773		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3774      mask_lo = -1;
3775    }
3776  else
3777    {
3778      hi = 0;
3779      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3780
3781      mask_hi = 0;
3782      mask_lo = ((unsigned HOST_WIDE_INT) -1
3783		 >> (HOST_BITS_PER_WIDE_INT - width));
3784    }
3785
3786  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3787     treat VAL as if it were unsigned.  */
3788  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3789      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3790    return exp;
3791
3792  /* Handle extension from a narrower type.  */
3793  if (TREE_CODE (exp) == NOP_EXPR
3794      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3795    return sign_bit_p (TREE_OPERAND (exp, 0), val);
3796
3797  return NULL_TREE;
3798}
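
/* For illustration only (assuming a 64-bit HOST_WIDE_INT): for a
   32-bit int EXP the function computes lo == 0x80000000 and
   mask_lo == 0xffffffff, so sign_bit_p (exp, val) returns EXP exactly
   when VAL is INT_MIN.  Callers use this to turn tests such as
   (x & 0x80000000) != 0 into the cheaper x < 0.  */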
3799
3800/* Subroutine for fold_truthop: determine if an operand is simple enough
3801   to be evaluated unconditionally.  */
3802
3803static int
3804simple_operand_p (tree exp)
3805{
3806  /* Strip any conversions that don't change the machine mode.  */
3807  STRIP_NOPS (exp);
3808
3809  return (CONSTANT_CLASS_P (exp)
3810	  || TREE_CODE (exp) == SSA_NAME
3811	  || (DECL_P (exp)
3812	      && ! TREE_ADDRESSABLE (exp)
3813	      && ! TREE_THIS_VOLATILE (exp)
3814	      && ! DECL_NONLOCAL (exp)
3815	      /* Don't regard global variables as simple.  They may be
3816		 allocated in ways unknown to the compiler (shared memory,
3817		 #pragma weak, etc).  */
3818	      && ! TREE_PUBLIC (exp)
3819	      && ! DECL_EXTERNAL (exp)
3820	      /* Loading a static variable is unduly expensive, but global
3821		 registers aren't expensive.  */
3822	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3823}
3824
3825/* The following functions are subroutines to fold_range_test and allow it to
3826   try to change a logical combination of comparisons into a range test.
3827
3828   For example, both
3829	X == 2 || X == 3 || X == 4 || X == 5
3830   and
3831	X >= 2 && X <= 5
3832   are converted to
3833	(unsigned) (X - 2) <= 3
3834
3835   We describe each set of comparisons as being either inside or outside
3836   a range, using a variable named like IN_P, and then describe the
3837   range with a lower and upper bound.  If one of the bounds is omitted,
3838   it represents either the highest or lowest value of the type.
3839
3840   In the comments below, we represent a range by two numbers in brackets
3841   preceded by a "+" to designate being inside that range, or a "-" to
3842   designate being outside that range, so the condition can be inverted by
3843   flipping the prefix.  An omitted bound is represented by a "-".  For
3844   example, "- [-, 10]" means being outside the range starting at the lowest
3845   possible value and ending at 10, in other words, being greater than 10.
3846   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3847   always false.
3848
3849   We set things up so that the missing bounds are handled in a consistent
3850   manner, so that neither a missing bound nor "true" or "false" needs to
3851   be handled as a special case.  */
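
/* For illustration only, a worked instance of the notation above: the
   test X == 2 || X == 3 is handled by inverting both sides, merging
   the complements - [2, 2] and - [3, 3] into the adjacent exclusion
   - [2, 3], and inverting the result again at the end, giving
   + [2, 3], i.e. (unsigned) (X - 2) <= 1.  */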
3852
3853/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3854   of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3855   and UPPER1_P are nonzero if the respective argument is an upper bound
3856   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3857   must be specified for a comparison.  ARG1 will be converted to ARG0's
3858   type if both are specified.  */
3859
3860static tree
3861range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3862	     tree arg1, int upper1_p)
3863{
3864  tree tem;
3865  int result;
3866  int sgn0, sgn1;
3867
3868  /* If neither arg represents infinity, do the normal operation.
3869     Else, if not a comparison, return infinity.  Else handle the special
3870     comparison rules. Note that most of the cases below won't occur, but
3871     are handled for consistency.  */
3872
3873  if (arg0 != 0 && arg1 != 0)
3874    {
3875      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3876			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3877      STRIP_NOPS (tem);
3878      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3879    }
3880
3881  if (TREE_CODE_CLASS (code) != tcc_comparison)
3882    return 0;
3883
3884  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3885     for neither.  In real mathematics we could not assume that open-ended
3886     ranges compare equal.  But this is computer arithmetic, where numbers
3887     are finite, so we can stand in for any missing bound with a value Z
3888     greater than any representable number.  This permits us to treat
3889     unbounded ranges as equal.  */
3890  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3891  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3892  switch (code)
3893    {
3894    case EQ_EXPR:
3895      result = sgn0 == sgn1;
3896      break;
3897    case NE_EXPR:
3898      result = sgn0 != sgn1;
3899      break;
3900    case LT_EXPR:
3901      result = sgn0 < sgn1;
3902      break;
3903    case LE_EXPR:
3904      result = sgn0 <= sgn1;
3905      break;
3906    case GT_EXPR:
3907      result = sgn0 > sgn1;
3908      break;
3909    case GE_EXPR:
3910      result = sgn0 >= sgn1;
3911      break;
3912    default:
3913      gcc_unreachable ();
3914    }
3915
3916  return constant_boolean_node (result, type);
3917}
3918
3919/* Given EXP, a logical expression, set the range it is testing into
3920   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
3921   actually being tested.  *PLOW and *PHIGH will be made of the same
3922   type as the returned expression.  If EXP is not a comparison, we
3923   will most likely not be returning a useful value and range.  Set
3924   *STRICT_OVERFLOW_P to true if the return value is only valid
3925   because signed overflow is undefined; otherwise, do not change
3926   *STRICT_OVERFLOW_P.  */
3927
3928static tree
3929make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
3930	    bool *strict_overflow_p)
3931{
3932  enum tree_code code;
3933  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3934  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3935  int in_p, n_in_p;
3936  tree low, high, n_low, n_high;
3937
3938  /* Start with simply saying "EXP != 0" and then look at the code of EXP
3939     and see if we can refine the range.  Some of the cases below may not
3940     happen, but it doesn't seem worth worrying about this.  We "continue"
3941     the outer loop when we've changed something; otherwise we "break"
3942     the switch, which will "break" the while.  */
3943
3944  in_p = 0;
3945  low = high = build_int_cst (TREE_TYPE (exp), 0);
3946
3947  while (1)
3948    {
3949      code = TREE_CODE (exp);
3950      exp_type = TREE_TYPE (exp);
3951
3952      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3953	{
3954	  if (TREE_CODE_LENGTH (code) > 0)
3955	    arg0 = TREE_OPERAND (exp, 0);
3956	  if (TREE_CODE_CLASS (code) == tcc_comparison
3957	      || TREE_CODE_CLASS (code) == tcc_unary
3958	      || TREE_CODE_CLASS (code) == tcc_binary)
3959	    arg0_type = TREE_TYPE (arg0);
3960	  if (TREE_CODE_CLASS (code) == tcc_binary
3961	      || TREE_CODE_CLASS (code) == tcc_comparison
3962	      || (TREE_CODE_CLASS (code) == tcc_expression
3963		  && TREE_CODE_LENGTH (code) > 1))
3964	    arg1 = TREE_OPERAND (exp, 1);
3965	}
3966
3967      switch (code)
3968	{
3969	case TRUTH_NOT_EXPR:
3970	  in_p = ! in_p, exp = arg0;
3971	  continue;
3972
3973	case EQ_EXPR: case NE_EXPR:
3974	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3975	  /* We can only do something if the range is testing for zero
3976	     and if the second operand is an integer constant.  Note that
3977	     saying something is "in" the range we make is done by
3978	     complementing IN_P since it will set in the initial case of
3979	     being not equal to zero; "out" is leaving it alone.  */
3980	  if (low == 0 || high == 0
3981	      || ! integer_zerop (low) || ! integer_zerop (high)
3982	      || TREE_CODE (arg1) != INTEGER_CST)
3983	    break;
3984
3985	  switch (code)
3986	    {
3987	    case NE_EXPR:  /* - [c, c]  */
3988	      low = high = arg1;
3989	      break;
3990	    case EQ_EXPR:  /* + [c, c]  */
3991	      in_p = ! in_p, low = high = arg1;
3992	      break;
3993	    case GT_EXPR:  /* - [-, c] */
3994	      low = 0, high = arg1;
3995	      break;
3996	    case GE_EXPR:  /* + [c, -] */
3997	      in_p = ! in_p, low = arg1, high = 0;
3998	      break;
3999	    case LT_EXPR:  /* - [c, -] */
4000	      low = arg1, high = 0;
4001	      break;
4002	    case LE_EXPR:  /* + [-, c] */
4003	      in_p = ! in_p, low = 0, high = arg1;
4004	      break;
4005	    default:
4006	      gcc_unreachable ();
4007	    }
4008
4009	  /* If this is an unsigned comparison, we also know that EXP is
4010	     greater than or equal to zero.  We base the range tests we make
4011	     on that fact, so we record it here so we can parse existing
4012	     range tests.  We test arg0_type since often the return type
4013	     of, e.g. EQ_EXPR, is boolean.  */
4014	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4015	    {
4016	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
4017				  in_p, low, high, 1,
4018				  build_int_cst (arg0_type, 0),
4019				  NULL_TREE))
4020		break;
4021
4022	      in_p = n_in_p, low = n_low, high = n_high;
4023
4024	      /* If the high bound is missing, but we have a nonzero low
4025		 bound, reverse the range so it goes from zero to the low bound
4026		 minus 1.  */
4027	      if (high == 0 && low && ! integer_zerop (low))
4028		{
4029		  in_p = ! in_p;
4030		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4031				      integer_one_node, 0);
4032		  low = build_int_cst (arg0_type, 0);
4033		}
4034	    }
4035
4036	  exp = arg0;
4037	  continue;
4038
4039	case NEGATE_EXPR:
4040	  /* (-x) IN [a,b] -> x in [-b, -a]  */
4041	  n_low = range_binop (MINUS_EXPR, exp_type,
4042			       build_int_cst (exp_type, 0),
4043			       0, high, 1);
4044	  n_high = range_binop (MINUS_EXPR, exp_type,
4045				build_int_cst (exp_type, 0),
4046				0, low, 0);
4047	  low = n_low, high = n_high;
4048	  exp = arg0;
4049	  continue;
4050
4051	case BIT_NOT_EXPR:
4052	  /* ~ X -> -X - 1  */
4053	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4054			build_int_cst (exp_type, 1));
4055	  continue;
4056
4057	case PLUS_EXPR:  case MINUS_EXPR:
4058	  if (TREE_CODE (arg1) != INTEGER_CST)
4059	    break;
4060
4061	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4062	     move a constant to the other side.  */
4063	  if (!TYPE_UNSIGNED (arg0_type)
4064	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4065	    break;
4066
4067	  /* If EXP is signed, any overflow in the computation is undefined,
4068	     so we don't worry about it so long as our computations on
4069	     the bounds don't overflow.  For unsigned, overflow is defined
4070	     and this is exactly the right thing.  */
4071	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4072			       arg0_type, low, 0, arg1, 0);
4073	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4074				arg0_type, high, 1, arg1, 0);
4075	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
4076	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
4077	    break;
4078
4079	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4080	    *strict_overflow_p = true;
4081
4082	  /* Check for an unsigned range which has wrapped around the maximum
4083	     value thus making n_high < n_low, and normalize it.  */
4084	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4085	    {
4086	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4087				 integer_one_node, 0);
4088	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4089				  integer_one_node, 0);
4090
4091	      /* If the range is of the form +/- [ x+1, x ], we won't
4092		 be able to normalize it.  But then, it represents the
4093		 whole range or the empty set, so make it
4094		 +/- [ -, - ].  */
4095	      if (tree_int_cst_equal (n_low, low)
4096		  && tree_int_cst_equal (n_high, high))
4097		low = high = 0;
4098	      else
4099		in_p = ! in_p;
4100	    }
4101	  else
4102	    low = n_low, high = n_high;
4103
4104	  exp = arg0;
4105	  continue;
4106
4107	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
4108	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4109	    break;
4110
4111	  if (! INTEGRAL_TYPE_P (arg0_type)
4112	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
4113	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4114	    break;
4115
4116	  n_low = low, n_high = high;
4117
4118	  if (n_low != 0)
4119	    n_low = fold_convert (arg0_type, n_low);
4120
4121	  if (n_high != 0)
4122	    n_high = fold_convert (arg0_type, n_high);
4123
4124
4125	  /* If we're converting arg0 from an unsigned type to exp's
4126	     signed type, we will be doing the comparison as unsigned.
4127	     The tests above have already verified that LOW and HIGH
4128	     are both positive.
4129
4130	     So we have to ensure that we will handle large unsigned
4131	     values the same way that the current signed bounds treat
4132	     negative values.  */
4133
4134	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4135	    {
4136	      tree high_positive;
4137	      tree equiv_type = lang_hooks.types.type_for_mode
4138		(TYPE_MODE (arg0_type), 1);
4139
4140	      /* A range without an upper bound is, naturally, unbounded.
4141		 Since convert would have cropped a very large value, use
4142		 the max value for the destination type.  */
4143	      high_positive
4144		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4145		: TYPE_MAX_VALUE (arg0_type);
4146
4147	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4148		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4149					     fold_convert (arg0_type,
4150							   high_positive),
4151					     fold_convert (arg0_type,
4152							   integer_one_node));
4153
4154	      /* If the low bound is specified, "and" the range with the
4155		 range for which the original unsigned value will be
4156		 positive.  */
4157	      if (low != 0)
4158		{
4159		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4160				      1, n_low, n_high, 1,
4161				      fold_convert (arg0_type,
4162						    integer_zero_node),
4163				      high_positive))
4164		    break;
4165
4166		  in_p = (n_in_p == in_p);
4167		}
4168	      else
4169		{
4170		  /* Otherwise, "or" the range with the range of the input
4171		     that will be interpreted as negative.  */
4172		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4173				      0, n_low, n_high, 1,
4174				      fold_convert (arg0_type,
4175						    integer_zero_node),
4176				      high_positive))
4177		    break;
4178
4179		  in_p = (in_p != n_in_p);
4180		}
4181	    }
4182
4183	  exp = arg0;
4184	  low = n_low, high = n_high;
4185	  continue;
4186
4187	default:
4188	  break;
4189	}
4190
4191      break;
4192    }
4193
4194  /* If EXP is a constant, we can evaluate whether this is true or false.  */
4195  if (TREE_CODE (exp) == INTEGER_CST)
4196    {
4197      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4198						 exp, 0, low, 0))
4199		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4200						    exp, 1, high, 1)));
4201      low = high = 0;
4202      exp = 0;
4203    }
4204
4205  *pin_p = in_p, *plow = low, *phigh = high;
4206  return exp;
4207}
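
/* For illustration only: for EXP of the form x > 10 with x unsigned,
   the GT_EXPR case above first records - [-, 10]; the unsigned
   handling then merges in x >= 0 to get + [11, -], and the
   missing-high-bound normalization turns that into - [0, 10].  The
   returned expression is x itself, with *PIN_P, *PLOW and *PHIGH
   describing the range.  */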
4208
4209/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4210   type, TYPE, return an expression to test if EXP is in (or out of, depending
4211   on IN_P) the range.  Return 0 if the test couldn't be created.  */
4212
4213static tree
4214build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4215{
4216  tree etype = TREE_TYPE (exp);
4217  tree value;
4218
4219#ifdef HAVE_canonicalize_funcptr_for_compare
4220  /* Disable this optimization for function pointer expressions
4221     on targets that require function pointer canonicalization.  */
4222  if (HAVE_canonicalize_funcptr_for_compare
4223      && TREE_CODE (etype) == POINTER_TYPE
4224      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4225    return NULL_TREE;
4226#endif
4227
4228  if (! in_p)
4229    {
4230      value = build_range_check (type, exp, 1, low, high);
4231      if (value != 0)
4232        return invert_truthvalue (value);
4233
4234      return 0;
4235    }
4236
4237  if (low == 0 && high == 0)
4238    return build_int_cst (type, 1);
4239
4240  if (low == 0)
4241    return fold_build2 (LE_EXPR, type, exp,
4242			fold_convert (etype, high));
4243
4244  if (high == 0)
4245    return fold_build2 (GE_EXPR, type, exp,
4246			fold_convert (etype, low));
4247
4248  if (operand_equal_p (low, high, 0))
4249    return fold_build2 (EQ_EXPR, type, exp,
4250			fold_convert (etype, low));
4251
4252  if (integer_zerop (low))
4253    {
4254      if (! TYPE_UNSIGNED (etype))
4255	{
4256	  etype = lang_hooks.types.unsigned_type (etype);
4257	  high = fold_convert (etype, high);
4258	  exp = fold_convert (etype, exp);
4259	}
4260      return build_range_check (type, exp, 1, 0, high);
4261    }
4262
4263  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4264  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4265    {
4266      unsigned HOST_WIDE_INT lo;
4267      HOST_WIDE_INT hi;
4268      int prec;
4269
4270      prec = TYPE_PRECISION (etype);
4271      if (prec <= HOST_BITS_PER_WIDE_INT)
4272	{
4273	  hi = 0;
4274	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4275	}
4276      else
4277	{
4278	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4279	  lo = (unsigned HOST_WIDE_INT) -1;
4280	}
4281
4282      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4283	{
4284	  if (TYPE_UNSIGNED (etype))
4285	    {
4286	      etype = lang_hooks.types.signed_type (etype);
4287	      exp = fold_convert (etype, exp);
4288	    }
4289	  return fold_build2 (GT_EXPR, type, exp,
4290			      build_int_cst (etype, 0));
4291	}
4292    }
4293
4294  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4295     This requires wrap-around arithmetic for the type of the expression.  */
4296  switch (TREE_CODE (etype))
4297    {
4298    case INTEGER_TYPE:
4299      /* There is no requirement that LOW be within the range of ETYPE
4300	 if the latter is a subtype.  It must, however, be within the base
4301	 type of ETYPE.  So be sure we do the subtraction in that type.  */
4302      if (TREE_TYPE (etype))
4303	etype = TREE_TYPE (etype);
4304      break;
4305
4306    case ENUMERAL_TYPE:
4307    case BOOLEAN_TYPE:
4308      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4309					      TYPE_UNSIGNED (etype));
4310      break;
4311
4312    default:
4313      break;
4314    }
4315
4316  /* If we don't have wrap-around arithmetic up front, try to force it.  */
4317  if (TREE_CODE (etype) == INTEGER_TYPE
4318      && !TYPE_OVERFLOW_WRAPS (etype))
4319    {
4320      tree utype, minv, maxv;
4321
4322      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4323	 for the type in question, as we rely on this here.  */
4324      utype = lang_hooks.types.unsigned_type (etype);
4325      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4326      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4327			  integer_one_node, 1);
4328      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4329
4330      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4331				      minv, 1, maxv, 1)))
4332	etype = utype;
4333      else
4334	return 0;
4335    }
4336
4337  high = fold_convert (etype, high);
4338  low = fold_convert (etype, low);
4339  exp = fold_convert (etype, exp);
4340
4341  value = const_binop (MINUS_EXPR, high, low, 0);
4342
4343  if (value != 0 && !TREE_OVERFLOW (value))
4344    return build_range_check (type,
4345			      fold_build2 (MINUS_EXPR, etype, exp, low),
4346			      1, build_int_cst (etype, 0), value);
4347
4348  return 0;
4349}
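
/* For illustration only: build_range_check (type, x, 1, 2, 5), with 2
   and 5 as INTEGER_CSTs, reduces to the familiar idiom
   (unsigned) (x - 2) <= 3; with IN_P == 0 the built test is inverted.
   For signed X whose type does not wrap, the subtraction is performed
   in the corresponding unsigned type, where the wrap-around this
   relies on is well defined.  */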
4350
4351/* Return the predecessor of VAL in its type, handling the infinite case.  */
4352
4353static tree
4354range_predecessor (tree val)
4355{
4356  tree type = TREE_TYPE (val);
4357
4358  if (INTEGRAL_TYPE_P (type)
4359      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4360    return 0;
4361  else
4362    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4363}
4364
4365/* Return the successor of VAL in its type, handling the infinite case.  */
4366
4367static tree
4368range_successor (tree val)
4369{
4370  tree type = TREE_TYPE (val);
4371
4372  if (INTEGRAL_TYPE_P (type)
4373      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4374    return 0;
4375  else
4376    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4377}
4378
4379/* Given two ranges, see if we can merge them into one.  Return 1 if we
4380   can, 0 if we can't.  Set the output range into the specified parameters.  */
4381
4382static int
4383merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4384	      tree high0, int in1_p, tree low1, tree high1)
4385{
4386  int no_overlap;
4387  int subset;
4388  int temp;
4389  tree tem;
4390  int in_p;
4391  tree low, high;
4392  int lowequal = ((low0 == 0 && low1 == 0)
4393		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4394						low0, 0, low1, 0)));
4395  int highequal = ((high0 == 0 && high1 == 0)
4396		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4397						 high0, 1, high1, 1)));
4398
4399  /* Make range 0 be the range that starts first, or ends last if they
4400     start at the same value.  Swap them if it isn't.  */
4401  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4402				 low0, 0, low1, 0))
4403      || (lowequal
4404	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4405					high1, 1, high0, 1))))
4406    {
4407      temp = in0_p, in0_p = in1_p, in1_p = temp;
4408      tem = low0, low0 = low1, low1 = tem;
4409      tem = high0, high0 = high1, high1 = tem;
4410    }
4411
4412  /* Now flag two cases, whether the ranges are disjoint or whether the
4413     second range is totally subsumed in the first.  Note that the tests
4414     below are simplified by the ones above.  */
4415  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4416					  high0, 1, low1, 0));
4417  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4418				      high1, 1, high0, 1));
4419
4420  /* We now have four cases, depending on whether we are including or
4421     excluding the two ranges.  */
4422  if (in0_p && in1_p)
4423    {
4424      /* If they don't overlap, the result is false.  If the second range
4425	 is a subset it is the result.  Otherwise, the range is from the start
4426	 of the second to the end of the first.  */
4427      if (no_overlap)
4428	in_p = 0, low = high = 0;
4429      else if (subset)
4430	in_p = 1, low = low1, high = high1;
4431      else
4432	in_p = 1, low = low1, high = high0;
4433    }
4434
4435  else if (in0_p && ! in1_p)
4436    {
4437      /* If they don't overlap, the result is the first range.  If they are
4438	 equal, the result is false.  If the second range is a subset of the
4439	 first, and the ranges begin at the same place, we go from just after
4440	 the end of the second range to the end of the first.  If the second
4441	 range is not a subset of the first, or if it is a subset and both
4442	 ranges end at the same place, the range starts at the start of the
4443	 first range and ends just before the second range.
4444	 Otherwise, we can't describe this as a single range.  */
4445      if (no_overlap)
4446	in_p = 1, low = low0, high = high0;
4447      else if (lowequal && highequal)
4448	in_p = 0, low = high = 0;
4449      else if (subset && lowequal)
4450	{
4451	  low = range_successor (high1);
4452	  high = high0;
4453	  in_p = 1;
4454	  if (low == 0)
4455	    {
4456	      /* We are in the weird situation where high0 > high1 but
4457		 high1 has no successor.  Punt.  */
4458	      return 0;
4459	    }
4460	}
4461      else if (! subset || highequal)
4462	{
4463	  low = low0;
4464	  high = range_predecessor (low1);
4465	  in_p = 1;
4466	  if (high == 0)
4467	    {
4468	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
4469	      return 0;
4470	    }
4471	}
4472      else
4473	return 0;
4474    }
4475
4476  else if (! in0_p && in1_p)
4477    {
4478      /* If they don't overlap, the result is the second range.  If the second
4479	 is a subset of the first, the result is false.  Otherwise,
4480	 the range starts just after the first range and ends at the
4481	 end of the second.  */
4482      if (no_overlap)
4483	in_p = 1, low = low1, high = high1;
4484      else if (subset || highequal)
4485	in_p = 0, low = high = 0;
4486      else
4487	{
4488	  low = range_successor (high0);
4489	  high = high1;
4490	  in_p = 1;
4491	  if (low == 0)
4492	    {
4493	      /* high1 > high0 but high0 has no successor.  Punt.  */
4494	      return 0;
4495	    }
4496	}
4497    }
4498
4499  else
4500    {
4501      /* The case where we are excluding both ranges.  Here the complex case
4502	 is if they don't overlap.  In that case, the only time we have a
4503	 range is if they are adjacent.  If the second is a subset of the
4504	 first, the result is the first.  Otherwise, the range to exclude
4505	 starts at the beginning of the first range and ends at the end of the
4506	 second.  */
4507      if (no_overlap)
4508	{
4509	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4510					 range_successor (high0),
4511					 1, low1, 0)))
4512	    in_p = 0, low = low0, high = high1;
4513	  else
4514	    {
4515	      /* Canonicalize - [min, x] into - [-, x].  */
4516	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
4517		switch (TREE_CODE (TREE_TYPE (low0)))
4518		  {
4519		  case ENUMERAL_TYPE:
4520		    if (TYPE_PRECISION (TREE_TYPE (low0))
4521			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4522		      break;
4523		    /* FALLTHROUGH */
4524		  case INTEGER_TYPE:
4525		    if (tree_int_cst_equal (low0,
4526					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
4527		      low0 = 0;
4528		    break;
4529		  case POINTER_TYPE:
4530		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
4531			&& integer_zerop (low0))
4532		      low0 = 0;
4533		    break;
4534		  default:
4535		    break;
4536		  }
4537
4538	      /* Canonicalize - [x, max] into - [x, -].  */
4539	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
4540		switch (TREE_CODE (TREE_TYPE (high1)))
4541		  {
4542		  case ENUMERAL_TYPE:
4543		    if (TYPE_PRECISION (TREE_TYPE (high1))
4544			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4545		      break;
4546		    /* FALLTHROUGH */
4547		  case INTEGER_TYPE:
4548		    if (tree_int_cst_equal (high1,
4549					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
4550		      high1 = 0;
4551		    break;
4552		  case POINTER_TYPE:
4553		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
4554			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4555						       high1, 1,
4556						       integer_one_node, 1)))
4557		      high1 = 0;
4558		    break;
4559		  default:
4560		    break;
4561		  }
4562
4563	      /* The ranges might be also adjacent between the maximum and
4564	         minimum values of the given type.  For
4565	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4566	         return + [x + 1, y - 1].  */
4567	      if (low0 == 0 && high1 == 0)
4568	        {
4569		  low = range_successor (high0);
4570		  high = range_predecessor (low1);
4571		  if (low == 0 || high == 0)
4572		    return 0;
4573
4574		  in_p = 1;
4575		}
4576	      else
4577		return 0;
4578	    }
4579	}
4580      else if (subset)
4581	in_p = 0, low = low0, high = high0;
4582      else
4583	in_p = 0, low = low0, high = high1;
4584    }
4585
4586  *pin_p = in_p, *plow = low, *phigh = high;
4587  return 1;
4588}
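
/* For illustration only: merging + [2, 7] with + [5, 9] (both
   included, overlapping, neither a subset of the other) yields their
   intersection + [5, 7]; merging the adjacent exclusions - [2, 4] and
   - [5, 9] yields - [2, 9].  A combination such as + [0, 3] with
   - [1, 2], whose result {0, 3} is not contiguous, makes the function
   return 0.  */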
4589
4590
4591/* Subroutine of fold, looking inside expressions of the form
4592   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4593   of the COND_EXPR.  This function is being used also to optimize
4594   A op B ? C : A, by reversing the comparison first.
4595
4596   Return a folded expression whose code is not a COND_EXPR
4597   anymore, or NULL_TREE if no folding opportunity is found.  */
4598
4599static tree
4600fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4601{
4602  enum tree_code comp_code = TREE_CODE (arg0);
4603  tree arg00 = TREE_OPERAND (arg0, 0);
4604  tree arg01 = TREE_OPERAND (arg0, 1);
4605  tree arg1_type = TREE_TYPE (arg1);
4606  tree tem;
4607
4608  STRIP_NOPS (arg1);
4609  STRIP_NOPS (arg2);
4610
4611  /* If we have A op 0 ? A : -A, consider applying the following
4612     transformations:
4613
4614     A == 0? A : -A    same as -A
4615     A != 0? A : -A    same as A
4616     A >= 0? A : -A    same as abs (A)
4617     A > 0?  A : -A    same as abs (A)
4618     A <= 0? A : -A    same as -abs (A)
4619     A < 0?  A : -A    same as -abs (A)
4620
4621     None of these transformations work for modes with signed
4622     zeros.  If A is +/-0, the first two transformations will
4623     change the sign of the result (from +0 to -0, or vice
4624     versa).  The last four will fix the sign of the result,
4625     even though the original expressions could be positive or
4626     negative, depending on the sign of A.
4627
4628     Note that all these transformations are correct if A is
4629     NaN, since the two alternatives (A and -A) are also NaNs.  */
4630  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4631       ? real_zerop (arg01)
4632       : integer_zerop (arg01))
4633      && ((TREE_CODE (arg2) == NEGATE_EXPR
4634	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4635	     /* In the case that A is of the form X-Y, '-A' (arg2) may
4636	        have already been folded to Y-X, check for that. */
4637	  || (TREE_CODE (arg1) == MINUS_EXPR
4638	      && TREE_CODE (arg2) == MINUS_EXPR
4639	      && operand_equal_p (TREE_OPERAND (arg1, 0),
4640				  TREE_OPERAND (arg2, 1), 0)
4641	      && operand_equal_p (TREE_OPERAND (arg1, 1),
4642				  TREE_OPERAND (arg2, 0), 0))))
4643    switch (comp_code)
4644      {
4645      case EQ_EXPR:
4646      case UNEQ_EXPR:
4647	tem = fold_convert (arg1_type, arg1);
4648	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4649      case NE_EXPR:
4650      case LTGT_EXPR:
4651	return pedantic_non_lvalue (fold_convert (type, arg1));
4652      case UNGE_EXPR:
4653      case UNGT_EXPR:
4654	if (flag_trapping_math)
4655	  break;
4656	/* Fall through.  */
4657      case GE_EXPR:
4658      case GT_EXPR:
4659	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4660	  arg1 = fold_convert (lang_hooks.types.signed_type
4661			       (TREE_TYPE (arg1)), arg1);
4662	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4663	return pedantic_non_lvalue (fold_convert (type, tem));
4664      case UNLE_EXPR:
4665      case UNLT_EXPR:
4666	if (flag_trapping_math)
4667	  break;
4668      case LE_EXPR:
4669      case LT_EXPR:
4670	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4671	  arg1 = fold_convert (lang_hooks.types.signed_type
4672			       (TREE_TYPE (arg1)), arg1);
4673	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4674	return negate_expr (fold_convert (type, tem));
4675      default:
4676	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4677	break;
4678      }
4679
4680  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
4681     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
4682     both transformations are correct when A is NaN: A != 0
4683     is then true, and A == 0 is false.  */
4684
4685  if (integer_zerop (arg01) && integer_zerop (arg2))
4686    {
4687      if (comp_code == NE_EXPR)
4688	return pedantic_non_lvalue (fold_convert (type, arg1));
4689      else if (comp_code == EQ_EXPR)
4690	return build_int_cst (type, 0);
4691    }
4692
4693  /* Try some transformations of A op B ? A : B.
4694
4695     A == B? A : B    same as B
4696     A != B? A : B    same as A
4697     A >= B? A : B    same as max (A, B)
4698     A > B?  A : B    same as max (B, A)
4699     A <= B? A : B    same as min (A, B)
4700     A < B?  A : B    same as min (B, A)
4701
4702     As above, these transformations don't work in the presence
4703     of signed zeros.  For example, if A and B are zeros of
4704     opposite sign, the first two transformations will change
4705     the sign of the result.  In the last four, the original
4706     expressions give different results for (A=+0, B=-0) and
4707     (A=-0, B=+0), but the transformed expressions do not.
4708
4709     The first two transformations are correct if either A or B
4710     is a NaN.  In the first transformation, the condition will
4711     be false, and B will indeed be chosen.  In the case of the
4712     second transformation, the condition A != B will be true,
4713     and A will be chosen.
4714
4715     The conversions to max() and min() are not correct if B is
4716     a number and A is not.  The conditions in the original
4717     expressions will be false, so all four give B.  The min()
4718     and max() versions would give a NaN instead.  */
4719  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4720      /* Avoid these transformations if the COND_EXPR may be used
4721	 as an lvalue in the C++ front-end.  PR c++/19199.  */
4722      && (in_gimple_form
4723	  || (strcmp (lang_hooks.name, "GNU C++") != 0
4724	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4725	  || ! maybe_lvalue_p (arg1)
4726	  || ! maybe_lvalue_p (arg2)))
4727    {
4728      tree comp_op0 = arg00;
4729      tree comp_op1 = arg01;
4730      tree comp_type = TREE_TYPE (comp_op0);
4731
4732      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
4733      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4734	{
4735	  comp_type = type;
4736	  comp_op0 = arg1;
4737	  comp_op1 = arg2;
4738	}
4739
4740      switch (comp_code)
4741	{
4742	case EQ_EXPR:
4743	  return pedantic_non_lvalue (fold_convert (type, arg2));
4744	case NE_EXPR:
4745	  return pedantic_non_lvalue (fold_convert (type, arg1));
4746	case LE_EXPR:
4747	case LT_EXPR:
4748	case UNLE_EXPR:
4749	case UNLT_EXPR:
4750	  /* In C++ a ?: expression can be an lvalue, so put the
4751	     operand which will be used if they are equal first
4752	     so that we can convert this back to the
4753	     corresponding COND_EXPR.  */
4754	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4755	    {
4756	      comp_op0 = fold_convert (comp_type, comp_op0);
4757	      comp_op1 = fold_convert (comp_type, comp_op1);
4758	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4759		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4760		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4761	      return pedantic_non_lvalue (fold_convert (type, tem));
4762	    }
4763	  break;
4764	case GE_EXPR:
4765	case GT_EXPR:
4766	case UNGE_EXPR:
4767	case UNGT_EXPR:
4768	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4769	    {
4770	      comp_op0 = fold_convert (comp_type, comp_op0);
4771	      comp_op1 = fold_convert (comp_type, comp_op1);
4772	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4773		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4774		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4775	      return pedantic_non_lvalue (fold_convert (type, tem));
4776	    }
4777	  break;
4778	case UNEQ_EXPR:
4779	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4780	    return pedantic_non_lvalue (fold_convert (type, arg2));
4781	  break;
4782	case LTGT_EXPR:
4783	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4784	    return pedantic_non_lvalue (fold_convert (type, arg1));
4785	  break;
4786	default:
4787	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4788	  break;
4789	}
4790    }
4791
4792  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4793     we might still be able to simplify this.  For example,
4794     if C1 is one less or one more than C2, this might have started
4795     out as a MIN or MAX and been transformed by this function.
4796     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
4797
4798  if (INTEGRAL_TYPE_P (type)
4799      && TREE_CODE (arg01) == INTEGER_CST
4800      && TREE_CODE (arg2) == INTEGER_CST)
4801    switch (comp_code)
4802      {
4803      case EQ_EXPR:
4804	/* We can replace A with C1 in this case.  */
4805	arg1 = fold_convert (type, arg01);
4806	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4807
4808      case LT_EXPR:
4809	/* If C1 is C2 + 1, this is min(A, C2).  */
4810	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4811			       OEP_ONLY_CONST)
4812	    && operand_equal_p (arg01,
4813				const_binop (PLUS_EXPR, arg2,
4814					     integer_one_node, 0),
4815				OEP_ONLY_CONST))
4816	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4817						   type, arg1, arg2));
4818	break;
4819
4820      case LE_EXPR:
4821	/* If C1 is C2 - 1, this is min(A, C2).  */
4822	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4823			       OEP_ONLY_CONST)
4824	    && operand_equal_p (arg01,
4825				const_binop (MINUS_EXPR, arg2,
4826					     integer_one_node, 0),
4827				OEP_ONLY_CONST))
4828	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4829						   type, arg1, arg2));
4830	break;
4831
4832      case GT_EXPR:
4833	/* If C1 is C2 - 1, this is max(A, C2).  */
4834	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4835			       OEP_ONLY_CONST)
4836	    && operand_equal_p (arg01,
4837				const_binop (MINUS_EXPR, arg2,
4838					     integer_one_node, 0),
4839				OEP_ONLY_CONST))
4840	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4841						   type, arg1, arg2));
4842	break;
4843
4844      case GE_EXPR:
4845	/* If C1 is C2 + 1, this is max(A, C2).  */
4846	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4847			       OEP_ONLY_CONST)
4848	    && operand_equal_p (arg01,
4849				const_binop (PLUS_EXPR, arg2,
4850					     integer_one_node, 0),
4851				OEP_ONLY_CONST))
4852	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4853						   type, arg1, arg2));
4854	break;
4855      case NE_EXPR:
4856	break;
4857      default:
4858	gcc_unreachable ();
4859      }
4860
4861  return NULL_TREE;
4862}
4863
4864
4865
4866#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4867#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4868#endif
4869
4870/* CODE, applied to OP0 and OP1, is a logical combination of boolean tests.
4871   See if we can merge it into some range test.  Return the new tree if so.  */
4872
4873static tree
4874fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4875{
4876  int or_op = (code == TRUTH_ORIF_EXPR
4877	       || code == TRUTH_OR_EXPR);
4878  int in0_p, in1_p, in_p;
4879  tree low0, low1, low, high0, high1, high;
4880  bool strict_overflow_p = false;
4881  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4882  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4883  tree tem;
4884  const char * const warnmsg = G_("assuming signed overflow does not occur "
4885				  "when simplifying range test");
4886
4887  /* If this is an OR operation, invert both sides; we will invert
4888     again at the end.  */
4889  if (or_op)
4890    in0_p = ! in0_p, in1_p = ! in1_p;
4891
4892  /* If both expressions are the same, if we can merge the ranges, and we
4893     can build the range test, return it or it inverted.  If one of the
4894     ranges is always true or always false, consider it to be the same
4895     expression as the other.  */
4896  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4897      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4898		       in1_p, low1, high1)
4899      && 0 != (tem = (build_range_check (type,
4900					 lhs != 0 ? lhs
4901					 : rhs != 0 ? rhs : integer_zero_node,
4902					 in_p, low, high))))
4903    {
4904      if (strict_overflow_p)
4905	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4906      return or_op ? invert_truthvalue (tem) : tem;
4907    }
4908
4909  /* On machines where the branch cost is expensive, if this is a
4910     short-circuited branch and the underlying object on both sides
4911     is the same, make a non-short-circuit operation.  */
4912  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4913	   && lhs != 0 && rhs != 0
4914	   && (code == TRUTH_ANDIF_EXPR
4915	       || code == TRUTH_ORIF_EXPR)
4916	   && operand_equal_p (lhs, rhs, 0))
4917    {
4918      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
4919	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4920	 which cases we can't do this.  */
4921      if (simple_operand_p (lhs))
4922	return build2 (code == TRUTH_ANDIF_EXPR
4923		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4924		       type, op0, op1);
4925
4926      else if (lang_hooks.decls.global_bindings_p () == 0
4927	       && ! CONTAINS_PLACEHOLDER_P (lhs))
4928	{
4929	  tree common = save_expr (lhs);
4930
4931	  if (0 != (lhs = build_range_check (type, common,
4932					     or_op ? ! in0_p : in0_p,
4933					     low0, high0))
4934	      && (0 != (rhs = build_range_check (type, common,
4935						 or_op ? ! in1_p : in1_p,
4936						 low1, high1))))
4937	    {
4938	      if (strict_overflow_p)
4939		fold_overflow_warning (warnmsg,
4940				       WARN_STRICT_OVERFLOW_COMPARISON);
4941	      return build2 (code == TRUTH_ANDIF_EXPR
4942			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4943			     type, lhs, rhs);
4944	    }
4945	}
4946    }
4947
4948  return 0;
4949}
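
/* For illustration only: for x == 2 || x == 7 the ranges + [2, 2] and
   + [7, 7] cannot be merged, but both comparisons are over the same
   simple operand X, so when LOGICAL_OP_NON_SHORT_CIRCUIT holds the
   TRUTH_ORIF_EXPR is rewritten as the unconditional TRUTH_OR_EXPR
   (x == 2) | (x == 7), trading a branch for a bitwise OR of the two
   flag values.  */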
4950
4951/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4952   bit value.  Arrange things so the extra bits will be set to zero if and
4953   only if C is sign-extended to its full width.  If MASK is nonzero,
4954   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
4955
4956static tree
4957unextend (tree c, int p, int unsignedp, tree mask)
4958{
4959  tree type = TREE_TYPE (c);
4960  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4961  tree temp;
4962
4963  if (p == modesize || unsignedp)
4964    return c;
4965
4966  /* We work by getting just the sign bit into the low-order bit, then
4967     into the high-order bit, then sign-extend.  We then XOR that value
4968     with C.  */
4969  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4970  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4971
4972  /* We must use a signed type in order to get an arithmetic right shift.
4973     However, we must also avoid introducing accidental overflows, so that
4974     a subsequent call to integer_zerop will work.  Hence we must
4975     do the type conversion here.  At this point, the constant is either
4976     zero or one, and the conversion to a signed type can never overflow.
4977     We could get an overflow if this conversion is done anywhere else.  */
4978  if (TYPE_UNSIGNED (type))
4979    temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4980
4981  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4982  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4983  if (mask != 0)
4984    temp = const_binop (BIT_AND_EXPR, temp,
4985			fold_convert (TREE_TYPE (c), mask), 0);
4986  /* If necessary, convert the type back to match the type of C.  */
4987  if (TYPE_UNSIGNED (type))
4988    temp = fold_convert (type, temp);
4989
4990  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4991}
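
/* For illustration only (assuming a 32-bit mode, P == 4 and a signed
   field): unextend maps C == 0x0000000a, the 4-bit value 1010 not yet
   sign-extended, to 0xfffffffa, and maps 0xfffffffa back to
   0x0000000a.  The bits above the field thus end up zero exactly when
   C was already sign-extended, which the caller can then test with a
   mask.  */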
4992
4993/* Find ways of folding logical expressions of LHS and RHS:
4994   Try to merge two comparisons to the same innermost item.
4995   Look for range tests like "ch >= '0' && ch <= '9'".
4996   Look for combinations of simple terms on machines with expensive branches
4997   and evaluate the RHS unconditionally.
4998
4999   For example, if we have p->a == 2 && p->b == 4 and we can make an
5000   object large enough to span both A and B, we can do this with a comparison
5001	   against the object ANDed with a mask.
5002
5003   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5004   operations to do this with one comparison.
5005
5006	   We check for both normal comparisons and the BIT_AND_EXPRs made by this
5007	   function and the one above.
5008
5009   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5010   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5011
5012	   TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
5013   two operands.
5014
5015   We return the simplified tree or 0 if no optimization is possible.  */
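
/* An illustrative sketch of the bit-field merge (not compiler code):
   given

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test "p->a == 2 && p->b == 4" can be folded, on a target whose
   ABI packs both fields into one byte in this order, into a single
   load and compare of the containing byte, roughly
   "(*(unsigned char *) p) == 0x42", with a mask applied first when
   the fields do not cover the whole unit.  */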
5016
5017static tree
5018fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5019{
5020  /* If this is the "or" of two comparisons, we can do something if
5021     the comparisons are NE_EXPR.  If this is the "and", we can do something
5022     if the comparisons are EQ_EXPR.  I.e.,
5023	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5024
5025     WANTED_CODE is this operation code.  For single bit fields, we can
5026     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5027     comparison for one-bit fields.  */
5028
5029  enum tree_code wanted_code;
5030  enum tree_code lcode, rcode;
5031  tree ll_arg, lr_arg, rl_arg, rr_arg;
5032  tree ll_inner, lr_inner, rl_inner, rr_inner;
5033  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5034  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5035  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5036  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5037  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5038  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5039  enum machine_mode lnmode, rnmode;
5040  tree ll_mask, lr_mask, rl_mask, rr_mask;
5041  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5042  tree l_const, r_const;
5043  tree lntype, rntype, result;
5044  int first_bit, end_bit;
5045  int volatilep;
5046  tree orig_lhs = lhs, orig_rhs = rhs;
5047  enum tree_code orig_code = code;
5048
5049  /* Start by getting the comparison codes.  Fail if anything is volatile.
5050     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5051	     it were wrapped in a NE_EXPR against zero.  */
5052
5053  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5054    return 0;
5055
5056  lcode = TREE_CODE (lhs);
5057  rcode = TREE_CODE (rhs);
5058
5059  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5060    {
5061      lhs = build2 (NE_EXPR, truth_type, lhs,
5062		    build_int_cst (TREE_TYPE (lhs), 0));
5063      lcode = NE_EXPR;
5064    }
5065
5066  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5067    {
5068      rhs = build2 (NE_EXPR, truth_type, rhs,
5069		    build_int_cst (TREE_TYPE (rhs), 0));
5070      rcode = NE_EXPR;
5071    }
5072
5073  if (TREE_CODE_CLASS (lcode) != tcc_comparison
5074      || TREE_CODE_CLASS (rcode) != tcc_comparison)
5075    return 0;
5076
5077  ll_arg = TREE_OPERAND (lhs, 0);
5078  lr_arg = TREE_OPERAND (lhs, 1);
5079  rl_arg = TREE_OPERAND (rhs, 0);
5080  rr_arg = TREE_OPERAND (rhs, 1);
5081
5082  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5083  if (simple_operand_p (ll_arg)
5084      && simple_operand_p (lr_arg))
5085    {
5086      tree result;
5087      if (operand_equal_p (ll_arg, rl_arg, 0)
5088          && operand_equal_p (lr_arg, rr_arg, 0))
5089	{
5090          result = combine_comparisons (code, lcode, rcode,
5091					truth_type, ll_arg, lr_arg);
5092	  if (result)
5093	    return result;
5094	}
5095      else if (operand_equal_p (ll_arg, rr_arg, 0)
5096               && operand_equal_p (lr_arg, rl_arg, 0))
5097	{
5098          result = combine_comparisons (code, lcode,
5099					swap_tree_comparison (rcode),
5100					truth_type, ll_arg, lr_arg);
5101	  if (result)
5102	    return result;
5103	}
5104    }
5105
5106  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5107	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5108
5109  /* If the RHS can be evaluated unconditionally and its operands are
5110     simple, it wins to evaluate the RHS unconditionally on machines
5111     with expensive branches.  In this case, this isn't a comparison
5112     that can be merged.  Avoid doing this if the RHS is a floating-point
5113     comparison since those can trap.  */
5114
5115  if (BRANCH_COST >= 2
5116      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5117      && simple_operand_p (rl_arg)
5118      && simple_operand_p (rr_arg))
5119    {
5120      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5121      if (code == TRUTH_OR_EXPR
5122	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5123	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5124	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5125	return build2 (NE_EXPR, truth_type,
5126		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5127			       ll_arg, rl_arg),
5128		       build_int_cst (TREE_TYPE (ll_arg), 0));
5129
5130      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5131      if (code == TRUTH_AND_EXPR
5132	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5133	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5134	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5135	return build2 (EQ_EXPR, truth_type,
5136		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5137			       ll_arg, rl_arg),
5138		       build_int_cst (TREE_TYPE (ll_arg), 0));
5139
5140      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5141	{
5142	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5143	    return build2 (code, truth_type, lhs, rhs);
5144	  return NULL_TREE;
5145	}
5146    }
5147
5148  /* See if the comparisons can be merged.  Then get all the parameters for
5149     each side.  */
5150
5151  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5152      || (rcode != EQ_EXPR && rcode != NE_EXPR))
5153    return 0;
5154
5155  volatilep = 0;
5156  ll_inner = decode_field_reference (ll_arg,
5157				     &ll_bitsize, &ll_bitpos, &ll_mode,
5158				     &ll_unsignedp, &volatilep, &ll_mask,
5159				     &ll_and_mask);
5160  lr_inner = decode_field_reference (lr_arg,
5161				     &lr_bitsize, &lr_bitpos, &lr_mode,
5162				     &lr_unsignedp, &volatilep, &lr_mask,
5163				     &lr_and_mask);
5164  rl_inner = decode_field_reference (rl_arg,
5165				     &rl_bitsize, &rl_bitpos, &rl_mode,
5166				     &rl_unsignedp, &volatilep, &rl_mask,
5167				     &rl_and_mask);
5168  rr_inner = decode_field_reference (rr_arg,
5169				     &rr_bitsize, &rr_bitpos, &rr_mode,
5170				     &rr_unsignedp, &volatilep, &rr_mask,
5171				     &rr_and_mask);
5172
5173	  /* The inner operation on the lhs of each comparison must be the same
5174	     if we are to be able to do anything.
5175     Then see if we have constants.  If not, the same must be true for
5176     the rhs's.  */
5177  if (volatilep || ll_inner == 0 || rl_inner == 0
5178      || ! operand_equal_p (ll_inner, rl_inner, 0))
5179    return 0;
5180
5181  if (TREE_CODE (lr_arg) == INTEGER_CST
5182      && TREE_CODE (rr_arg) == INTEGER_CST)
5183    l_const = lr_arg, r_const = rr_arg;
5184  else if (lr_inner == 0 || rr_inner == 0
5185	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5186    return 0;
5187  else
5188    l_const = r_const = 0;
5189
5190  /* If either comparison code is not correct for our logical operation,
5191     fail.  However, we can convert a one-bit comparison against zero into
5192     the opposite comparison against that bit being set in the field.  */
5193
5194  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5195  if (lcode != wanted_code)
5196    {
5197      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5198	{
5199	  /* Make the left operand unsigned, since we are only interested
5200	     in the value of one bit.  Otherwise we are doing the wrong
5201	     thing below.  */
5202	  ll_unsignedp = 1;
5203	  l_const = ll_mask;
5204	}
5205      else
5206	return 0;
5207    }
5208
5209  /* This is analogous to the code for l_const above.  */
5210  if (rcode != wanted_code)
5211    {
5212      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5213	{
5214	  rl_unsignedp = 1;
5215	  r_const = rl_mask;
5216	}
5217      else
5218	return 0;
5219    }
5220
5221  /* After this point all optimizations will generate bit-field
5222     references, which we might not want.  */
5223  if (! lang_hooks.can_use_bit_fields_p ())
5224    return 0;
5225
5226  /* See if we can find a mode that contains both fields being compared on
5227     the left.  If we can't, fail.  Otherwise, update all constants and masks
5228     to be relative to a field of that size.  */
5229  first_bit = MIN (ll_bitpos, rl_bitpos);
5230  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5231  lnmode = get_best_mode (end_bit - first_bit, first_bit,
5232			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5233			  volatilep);
5234  if (lnmode == VOIDmode)
5235    return 0;
5236
5237  lnbitsize = GET_MODE_BITSIZE (lnmode);
5238  lnbitpos = first_bit & ~ (lnbitsize - 1);
5239  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5240  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5241
5242  if (BYTES_BIG_ENDIAN)
5243    {
5244      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5245      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5246    }
5247
5248  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5249			 size_int (xll_bitpos), 0);
5250  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5251			 size_int (xrl_bitpos), 0);
5252
5253  if (l_const)
5254    {
5255      l_const = fold_convert (lntype, l_const);
5256      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5257      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5258      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5259					fold_build1 (BIT_NOT_EXPR,
5260						     lntype, ll_mask),
5261					0)))
5262	{
5263	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5264
5265	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5266	}
5267    }
5268  if (r_const)
5269    {
5270      r_const = fold_convert (lntype, r_const);
5271      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5272      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5273      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5274					fold_build1 (BIT_NOT_EXPR,
5275						     lntype, rl_mask),
5276					0)))
5277	{
5278	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5279
5280	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5281	}
5282    }
5283
5284	  /* If the right sides are not constant, do the same for them.  Also,
5285     disallow this optimization if a size or signedness mismatch occurs
5286     between the left and right sides.  */
5287  if (l_const == 0)
5288    {
5289      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5290	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5291	  /* Make sure the two fields on the right
5292	     correspond to the left without being swapped.  */
5293	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5294	return 0;
5295
5296      first_bit = MIN (lr_bitpos, rr_bitpos);
5297      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5298      rnmode = get_best_mode (end_bit - first_bit, first_bit,
5299			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5300			      volatilep);
5301      if (rnmode == VOIDmode)
5302	return 0;
5303
5304      rnbitsize = GET_MODE_BITSIZE (rnmode);
5305      rnbitpos = first_bit & ~ (rnbitsize - 1);
5306      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5307      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5308
5309      if (BYTES_BIG_ENDIAN)
5310	{
5311	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5312	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5313	}
5314
5315      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5316			     size_int (xlr_bitpos), 0);
5317      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5318			     size_int (xrr_bitpos), 0);
5319
5320      /* Make a mask that corresponds to both fields being compared.
5321	 Do this for both items being compared.  If the operands are the
5322	 same size and the bits being compared are in the same position
5323	 then we can do this by masking both and comparing the masked
5324	 results.  */
5325      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5326      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5327      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5328	{
5329	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5330				    ll_unsignedp || rl_unsignedp);
5331	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5332	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5333
5334	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5335				    lr_unsignedp || rr_unsignedp);
5336	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5337	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5338
5339	  return build2 (wanted_code, truth_type, lhs, rhs);
5340	}
5341
5342      /* There is still another way we can do something:  If both pairs of
5343	 fields being compared are adjacent, we may be able to make a wider
5344	 field containing them both.
5345
5346	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5347	 the mask must be shifted to account for the shift done by
5348	 make_bit_field_ref.  */
5349      if ((ll_bitsize + ll_bitpos == rl_bitpos
5350	   && lr_bitsize + lr_bitpos == rr_bitpos)
5351	  || (ll_bitpos == rl_bitpos + rl_bitsize
5352	      && lr_bitpos == rr_bitpos + rr_bitsize))
5353	{
5354	  tree type;
5355
5356	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5357				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5358	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5359				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5360
5361	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5362				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5363	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5364				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5365
5366	  /* Convert to the smaller type before masking out unwanted bits.  */
5367	  type = lntype;
5368	  if (lntype != rntype)
5369	    {
5370	      if (lnbitsize > rnbitsize)
5371		{
5372		  lhs = fold_convert (rntype, lhs);
5373		  ll_mask = fold_convert (rntype, ll_mask);
5374		  type = rntype;
5375		}
5376	      else if (lnbitsize < rnbitsize)
5377		{
5378		  rhs = fold_convert (lntype, rhs);
5379		  lr_mask = fold_convert (lntype, lr_mask);
5380		  type = lntype;
5381		}
5382	    }
5383
5384	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5385	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5386
5387	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5388	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5389
5390	  return build2 (wanted_code, truth_type, lhs, rhs);
5391	}
5392
5393      return 0;
5394    }
5395
5396  /* Handle the case of comparisons with constants.  If there is something in
5397     common between the masks, those bits of the constants must be the same.
5398     If not, the condition is always false.  Test for this to avoid generating
5399     incorrect code below.  */
5400  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5401  if (! integer_zerop (result)
5402      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5403			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5404    {
5405      if (wanted_code == NE_EXPR)
5406	{
5407	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5408	  return constant_boolean_node (true, truth_type);
5409	}
5410      else
5411	{
5412	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5413	  return constant_boolean_node (false, truth_type);
5414	}
5415    }
5416
5417  /* Construct the expression we will return.  First get the component
5418     reference we will make.  Unless the mask is all ones the width of
5419     that field, perform the mask operation.  Then compare with the
5420     merged constant.  */
5421  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5422			       ll_unsignedp || rl_unsignedp);
5423
5424  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5425  if (! all_ones_mask_p (ll_mask, lnbitsize))
5426    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5427
5428  return build2 (wanted_code, truth_type, result,
5429		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5430}
5431
5432/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5433   constant.  */
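
/* Some illustrative instances of the folds performed below:

     MAX (x, 0) >  5    becomes   x > 5
     MAX (x, 0) == -1   becomes   false
     MIN (x, 0) == 0    becomes   x >= 0

   Only EQ_EXPR and GT_EXPR are handled directly; the other comparison
   codes are reduced to those two by inversion and disjunction.  */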
5434
5435static tree
5436optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5437{
5438  tree arg0 = op0;
5439  enum tree_code op_code;
5440  tree comp_const = op1;
5441  tree minmax_const;
5442  int consts_equal, consts_lt;
5443  tree inner;
5444
5445  STRIP_SIGN_NOPS (arg0);
5446
5447  op_code = TREE_CODE (arg0);
5448  minmax_const = TREE_OPERAND (arg0, 1);
5449  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5450  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5451  inner = TREE_OPERAND (arg0, 0);
5452
5453  /* If something does not permit us to optimize, return the original tree.  */
5454  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5455      || TREE_CODE (comp_const) != INTEGER_CST
5456      || TREE_CONSTANT_OVERFLOW (comp_const)
5457      || TREE_CODE (minmax_const) != INTEGER_CST
5458      || TREE_CONSTANT_OVERFLOW (minmax_const))
5459    return NULL_TREE;
5460
5461  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5462     and GT_EXPR, doing the rest with recursive calls using logical
5463     simplifications.  */
5464  switch (code)
5465    {
5466    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
5467      {
5468	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5469					       type, op0, op1);
5470	if (tem)
5471	  return invert_truthvalue (tem);
5472	return NULL_TREE;
5473      }
5474
5475    case GE_EXPR:
5476      return
5477	fold_build2 (TRUTH_ORIF_EXPR, type,
5478		     optimize_minmax_comparison
5479		     (EQ_EXPR, type, arg0, comp_const),
5480		     optimize_minmax_comparison
5481		     (GT_EXPR, type, arg0, comp_const));
5482
5483    case EQ_EXPR:
5484      if (op_code == MAX_EXPR && consts_equal)
5485	/* MAX (X, 0) == 0  ->  X <= 0  */
5486	return fold_build2 (LE_EXPR, type, inner, comp_const);
5487
5488      else if (op_code == MAX_EXPR && consts_lt)
5489	/* MAX (X, 0) == 5  ->  X == 5   */
5490	return fold_build2 (EQ_EXPR, type, inner, comp_const);
5491
5492      else if (op_code == MAX_EXPR)
5493	/* MAX (X, 0) == -1  ->  false  */
5494	return omit_one_operand (type, integer_zero_node, inner);
5495
5496      else if (consts_equal)
5497	/* MIN (X, 0) == 0  ->  X >= 0  */
5498	return fold_build2 (GE_EXPR, type, inner, comp_const);
5499
5500      else if (consts_lt)
5501	/* MIN (X, 0) == 5  ->  false  */
5502	return omit_one_operand (type, integer_zero_node, inner);
5503
5504      else
5505	/* MIN (X, 0) == -1  ->  X == -1  */
5506	return fold_build2 (EQ_EXPR, type, inner, comp_const);
5507
5508    case GT_EXPR:
5509      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5510	/* MAX (X, 0) > 0  ->  X > 0
5511	   MAX (X, 0) > 5  ->  X > 5  */
5512	return fold_build2 (GT_EXPR, type, inner, comp_const);
5513
5514      else if (op_code == MAX_EXPR)
5515	/* MAX (X, 0) > -1  ->  true  */
5516	return omit_one_operand (type, integer_one_node, inner);
5517
5518      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5519	/* MIN (X, 0) > 0  ->  false
5520	   MIN (X, 0) > 5  ->  false  */
5521	return omit_one_operand (type, integer_zero_node, inner);
5522
5523      else
5524	/* MIN (X, 0) > -1  ->  X > -1  */
5525	return fold_build2 (GT_EXPR, type, inner, comp_const);
5526
5527    default:
5528      return NULL_TREE;
5529    }
5530}
5531
5532	/* T is an integer expression that is being multiplied by, divided by, or
5533	   taken modulo a constant C (CODE says which operation and what kind of
5534	   divide or modulus).  See if we can eliminate that operation by folding it with
5535   other operations already in T.  WIDE_TYPE, if non-null, is a type that
5536   should be used for the computation if wider than our type.
5537
5538   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5539   (X * 2) + (Y * 4).  We must, however, be assured that either the original
5540   expression would not overflow or that overflow is undefined for the type
5541   in the language in question.
5542
5543   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5544   the machine has a multiply-accumulate insn or that this is part of an
5545   addressing calculation.
5546
5547   If we return a non-null expression, it is an equivalent form of the
5548   original computation, but need not be in the original type.
5549
5550	   We set *STRICT_OVERFLOW_P to true if the return value depends on
5551   signed overflow being undefined.  Otherwise we do not change
5552   *STRICT_OVERFLOW_P.  */
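
/* A source-level sketch of the effect (assuming a type for which
   overflow is undefined, such as plain "int" without -fwrapv):

     (x * 8 + y * 16) / 4   can be rewritten as   x * 2 + y * 4
     (x + 7) * 4            is canonicalized to   x * 4 + 28

   both taken from the examples in the comment above.  */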
5553
5554static tree
5555extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5556		bool *strict_overflow_p)
5557{
5558  /* To avoid exponential search depth, refuse to allow recursion past
5559     three levels.  Beyond that (1) it's highly unlikely that we'll find
5560     something interesting and (2) we've probably processed it before
5561     when we built the inner expression.  */
5562
5563  static int depth;
5564  tree ret;
5565
5566  if (depth > 3)
5567    return NULL;
5568
5569  depth++;
5570  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5571  depth--;
5572
5573  return ret;
5574}
5575
5576static tree
5577extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5578		  bool *strict_overflow_p)
5579{
5580  tree type = TREE_TYPE (t);
5581  enum tree_code tcode = TREE_CODE (t);
5582  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5583				   > GET_MODE_SIZE (TYPE_MODE (type)))
5584		? wide_type : type);
5585  tree t1, t2;
5586  int same_p = tcode == code;
5587  tree op0 = NULL_TREE, op1 = NULL_TREE;
5588  bool sub_strict_overflow_p;
5589
5590  /* Don't deal with constants of zero here; they confuse the code below.  */
5591  if (integer_zerop (c))
5592    return NULL_TREE;
5593
5594  if (TREE_CODE_CLASS (tcode) == tcc_unary)
5595    op0 = TREE_OPERAND (t, 0);
5596
5597  if (TREE_CODE_CLASS (tcode) == tcc_binary)
5598    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5599
5600  /* Note that we need not handle conditional operations here since fold
5601     already handles those cases.  So just do arithmetic here.  */
5602  switch (tcode)
5603    {
5604    case INTEGER_CST:
5605      /* For a constant, we can always simplify if we are a multiply
5606	 or (for divide and modulus) if it is a multiple of our constant.  */
5607      if (code == MULT_EXPR
5608	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5609	return const_binop (code, fold_convert (ctype, t),
5610			    fold_convert (ctype, c), 0);
5611      break;
5612
5613    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
5614      /* If op0 is an expression ...  */
5615      if ((COMPARISON_CLASS_P (op0)
5616	   || UNARY_CLASS_P (op0)
5617	   || BINARY_CLASS_P (op0)
5618	   || EXPRESSION_CLASS_P (op0))
5619	  /* ... and is unsigned, and its type is smaller than ctype,
5620	     then we cannot pass through as widening.  */
5621	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5622	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5623		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5624	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
5625	           > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5626	      /* ... or this is a truncation (t is narrower than op0),
5627		 then we cannot pass through this narrowing.  */
5628	      || (GET_MODE_SIZE (TYPE_MODE (type))
5629		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5630	      /* ... or signedness changes for division or modulus,
5631		 then we cannot pass through this conversion.  */
5632	      || (code != MULT_EXPR
5633		  && (TYPE_UNSIGNED (ctype)
5634		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5635	break;
5636
5637      /* Pass the constant down and see if we can make a simplification.  If
5638	 we can, replace this expression with the inner simplification for
5639	 possible later conversion to our or some other type.  */
5640      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5641	  && TREE_CODE (t2) == INTEGER_CST
5642	  && ! TREE_CONSTANT_OVERFLOW (t2)
5643	  && (0 != (t1 = extract_muldiv (op0, t2, code,
5644					 code == MULT_EXPR
5645					 ? ctype : NULL_TREE,
5646					 strict_overflow_p))))
5647	return t1;
5648      break;
5649
5650    case ABS_EXPR:
5651      /* If widening the type changes it from signed to unsigned, then we
5652         must avoid building ABS_EXPR itself as unsigned.  */
5653      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5654        {
5655          tree cstype = (*lang_hooks.types.signed_type) (ctype);
5656          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5657	      != 0)
5658            {
5659              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5660              return fold_convert (ctype, t1);
5661            }
5662          break;
5663        }
5664      /* If the constant is negative, we cannot simplify this.  */
5665      if (tree_int_cst_sgn (c) == -1)
5666	break;
5667      /* FALLTHROUGH */
5668    case NEGATE_EXPR:
5669      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5670	  != 0)
5671	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5672      break;
5673
5674    case MIN_EXPR:  case MAX_EXPR:
5675      /* If widening the type changes the signedness, then we can't perform
5676	 this optimization as that changes the result.  */
5677      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5678	break;
5679
5680      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
5681      sub_strict_overflow_p = false;
5682      if ((t1 = extract_muldiv (op0, c, code, wide_type,
5683				&sub_strict_overflow_p)) != 0
5684	  && (t2 = extract_muldiv (op1, c, code, wide_type,
5685				   &sub_strict_overflow_p)) != 0)
5686	{
5687	  if (tree_int_cst_sgn (c) < 0)
5688	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5689	  if (sub_strict_overflow_p)
5690	    *strict_overflow_p = true;
5691	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5692			      fold_convert (ctype, t2));
5693	}
5694      break;
5695
5696    case LSHIFT_EXPR:  case RSHIFT_EXPR:
5697      /* If the second operand is constant, this is a multiplication
5698	 or floor division, by a power of two, so we can treat it that
5699	 way unless the multiplier or divisor overflows.  Signed
5700	 left-shift overflow is implementation-defined rather than
5701	 undefined in C90, so do not convert signed left shift into
5702	 multiplication.  */
5703      if (TREE_CODE (op1) == INTEGER_CST
5704	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5705	  /* const_binop may not detect overflow correctly,
5706	     so check for it explicitly here.  */
5707	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5708	  && TREE_INT_CST_HIGH (op1) == 0
5709	  && 0 != (t1 = fold_convert (ctype,
5710				      const_binop (LSHIFT_EXPR,
5711						   size_one_node,
5712						   op1, 0)))
5713	  && ! TREE_OVERFLOW (t1))
5714	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5715				       ? MULT_EXPR : FLOOR_DIV_EXPR,
5716				       ctype, fold_convert (ctype, op0), t1),
5717			       c, code, wide_type, strict_overflow_p);
5718      break;
5719
5720    case PLUS_EXPR:  case MINUS_EXPR:
5721      /* See if we can eliminate the operation on both sides.  If we can, we
5722	 can return a new PLUS or MINUS.  If we can't, the only remaining
5723	 cases where we can do anything are if the second operand is a
5724	 constant.  */
5725      sub_strict_overflow_p = false;
5726      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5727      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5728      if (t1 != 0 && t2 != 0
5729	  && (code == MULT_EXPR
5730	      /* If not multiplication, we can only do this if both operands
5731		 are divisible by c.  */
5732	      || (multiple_of_p (ctype, op0, c)
5733	          && multiple_of_p (ctype, op1, c))))
5734	{
5735	  if (sub_strict_overflow_p)
5736	    *strict_overflow_p = true;
5737	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5738			      fold_convert (ctype, t2));
5739	}
5740
5741      /* If this was a subtraction, negate OP1 and set it to be an addition.
5742	 This simplifies the logic below.  */
5743      if (tcode == MINUS_EXPR)
5744	tcode = PLUS_EXPR, op1 = negate_expr (op1);
5745
5746      if (TREE_CODE (op1) != INTEGER_CST)
5747	break;
5748
5749      /* If either OP1 or C are negative, this optimization is not safe for
5750	 some of the division and remainder types while for others we need
5751	 to change the code.  */
5752      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5753	{
5754	  if (code == CEIL_DIV_EXPR)
5755	    code = FLOOR_DIV_EXPR;
5756	  else if (code == FLOOR_DIV_EXPR)
5757	    code = CEIL_DIV_EXPR;
5758	  else if (code != MULT_EXPR
5759		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5760	    break;
5761	}
5762
5763      /* If it's a multiply or a division/modulus operation of a multiple
5764         of our constant, do the operation and verify it doesn't overflow.  */
5765      if (code == MULT_EXPR
5766	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5767	{
5768	  op1 = const_binop (code, fold_convert (ctype, op1),
5769			     fold_convert (ctype, c), 0);
5770	  /* We allow the constant to overflow with wrapping semantics.  */
5771	  if (op1 == 0
5772	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5773	    break;
5774	}
5775      else
5776	break;
5777
5778	      /* If we have an unsigned type that is not a sizetype, we cannot widen
5779	 the operation since it will change the result if the original
5780	 computation overflowed.  */
5781      if (TYPE_UNSIGNED (ctype)
5782	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5783	  && ctype != type)
5784	break;
5785
5786      /* If we were able to eliminate our operation from the first side,
5787	 apply our operation to the second side and reform the PLUS.  */
5788      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5789	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5790
5791      /* The last case is if we are a multiply.  In that case, we can
5792	 apply the distributive law to commute the multiply and addition
5793	 if the multiplication of the constants doesn't overflow.  */
5794      if (code == MULT_EXPR)
5795	return fold_build2 (tcode, ctype,
5796			    fold_build2 (code, ctype,
5797					 fold_convert (ctype, op0),
5798					 fold_convert (ctype, c)),
5799			    op1);
5800
5801      break;
5802
5803    case MULT_EXPR:
5804      /* We have a special case here if we are doing something like
5805	 (C * 8) % 4 since we know that's zero.  */
5806      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5807	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5808	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5809	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5810	return omit_one_operand (type, integer_zero_node, op0);
5811
5812      /* ... fall through ...  */
5813
5814    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
5815    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
5816      /* If we can extract our operation from the LHS, do so and return a
5817	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
5818	 do something only if the second operand is a constant.  */
5819      if (same_p
5820	  && (t1 = extract_muldiv (op0, c, code, wide_type,
5821				   strict_overflow_p)) != 0)
5822	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5823			    fold_convert (ctype, op1));
5824      else if (tcode == MULT_EXPR && code == MULT_EXPR
5825	       && (t1 = extract_muldiv (op1, c, code, wide_type,
5826					strict_overflow_p)) != 0)
5827	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5828			    fold_convert (ctype, t1));
5829      else if (TREE_CODE (op1) != INTEGER_CST)
5830	return 0;
5831
5832      /* If these are the same operation types, we can associate them
5833	 assuming no overflow.  */
5834      if (tcode == code
5835	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5836				     fold_convert (ctype, c), 0))
5837	  && ! TREE_OVERFLOW (t1))
5838	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5839
5840      /* If these operations "cancel" each other, we have the main
5841	 optimizations of this pass, which occur when either constant is a
5842	 multiple of the other, in which case we replace this with either an
5843	 multiple of the other, in which case we replace this with an
5844	 operation of either CODE or TCODE.
5845	 If we have an unsigned type that is not a sizetype, we cannot do
5846	 this since it will change the result if the original computation
5847	 overflowed.  */
5848      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5849	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5850	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5851	      || (tcode == MULT_EXPR
5852		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5853		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5854	{
5855	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5856	    {
5857	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5858		*strict_overflow_p = true;
5859	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5860				  fold_convert (ctype,
5861						const_binop (TRUNC_DIV_EXPR,
5862							     op1, c, 0)));
5863	    }
5864	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5865	    {
5866	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5867		*strict_overflow_p = true;
5868	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
5869				  fold_convert (ctype,
5870						const_binop (TRUNC_DIV_EXPR,
5871							     c, op1, 0)));
5872	    }
5873	}
5874      break;
5875
5876    default:
5877      break;
5878    }
5879
5880  return 0;
5881}
5882
5883/* Return a node which has the indicated constant VALUE (either 0 or
5884   1), and is of the indicated TYPE.  */
5885
5886tree
5887constant_boolean_node (int value, tree type)
5888{
5889  if (type == integer_type_node)
5890    return value ? integer_one_node : integer_zero_node;
5891  else if (type == boolean_type_node)
5892    return value ? boolean_true_node : boolean_false_node;
5893  else
5894    return build_int_cst (type, value);
5895}
5896
5897
5898/* Return true if expr looks like an ARRAY_REF and set base and
5899   offset to the appropriate trees.  If there is no offset,
5900   offset is set to NULL_TREE.  Base will be canonicalized to
5901   something you can get the element type from using
5902   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
5903   in bytes to the base.  */
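
/* For example (an illustrative sketch): the C expression "&a[i]",
   seen here as ADDR_EXPR <ARRAY_REF <a, i>>, yields BASE "a" and
   OFFSET "i * sizeof (*a)" in bytes; a plain pointer variable "p"
   yields BASE "p" and a NULL_TREE OFFSET.  */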
5904
5905static bool
5906extract_array_ref (tree expr, tree *base, tree *offset)
5907{
5908  /* One canonical form is a PLUS_EXPR with the first
5909     argument being an ADDR_EXPR with a possible NOP_EXPR
5910     attached.  */
5911  if (TREE_CODE (expr) == PLUS_EXPR)
5912    {
5913      tree op0 = TREE_OPERAND (expr, 0);
5914      tree inner_base, dummy1;
5915	      /* Strip NOP_EXPRs here because the C frontends and/or
5916		 folders may present us with (int *)&x.a + 4B.  */
5917      STRIP_NOPS (op0);
5918      if (extract_array_ref (op0, &inner_base, &dummy1))
5919	{
5920	  *base = inner_base;
5921	  if (dummy1 == NULL_TREE)
5922	    *offset = TREE_OPERAND (expr, 1);
5923	  else
5924	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5925				   dummy1, TREE_OPERAND (expr, 1));
5926	  return true;
5927	}
5928    }
5929	  /* The other canonical form is an ADDR_EXPR of an ARRAY_REF,
5930     which we transform into an ADDR_EXPR with appropriate
5931     offset.  For other arguments to the ADDR_EXPR we assume
5932     zero offset and as such do not care about the ADDR_EXPR
5933     type and strip possible nops from it.  */
5934  else if (TREE_CODE (expr) == ADDR_EXPR)
5935    {
5936      tree op0 = TREE_OPERAND (expr, 0);
5937      if (TREE_CODE (op0) == ARRAY_REF)
5938	{
5939	  tree idx = TREE_OPERAND (op0, 1);
5940	  *base = TREE_OPERAND (op0, 0);
5941	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5942				 array_ref_element_size (op0));
5943	}
5944      else
5945	{
5946	  /* Handle array-to-pointer decay as &a.  */
5947	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5948	    *base = TREE_OPERAND (expr, 0);
5949	  else
5950	    *base = expr;
5951	  *offset = NULL_TREE;
5952	}
5953      return true;
5954    }
5955  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
5956  else if (SSA_VAR_P (expr)
5957	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5958    {
5959      *base = expr;
5960      *offset = NULL_TREE;
5961      return true;
5962    }
5963
5964  return false;
5965}
5966
5967
5968/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5969   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5970   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5971   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
5972   COND is the first argument to CODE; otherwise (as in the example
5973   given here), it is the second argument.  TYPE is the type of the
5974   original expression.  Return NULL_TREE if no simplification is
5975   possible.  */
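
/* A source-level sketch (illustrative only), with a constant "a":

     10 + (b ? x : y)   becomes   b ? (10 + x) : (10 + y)
     1 + (x < y)        becomes   (x < y) ? 2 : 1

   so that each arm can be simplified on its own.  */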
5976
5977static tree
5978fold_binary_op_with_conditional_arg (enum tree_code code,
5979				     tree type, tree op0, tree op1,
5980				     tree cond, tree arg, int cond_first_p)
5981{
5982  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5983  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5984  tree test, true_value, false_value;
5985  tree lhs = NULL_TREE;
5986  tree rhs = NULL_TREE;
5987
5988  /* This transformation is only worthwhile if we don't have to wrap
5989     arg in a SAVE_EXPR, and the operation can be simplified on at least
5990	     one of the branches once it's pushed inside the COND_EXPR.  */
5991  if (!TREE_CONSTANT (arg))
5992    return NULL_TREE;
5993
5994  if (TREE_CODE (cond) == COND_EXPR)
5995    {
5996      test = TREE_OPERAND (cond, 0);
5997      true_value = TREE_OPERAND (cond, 1);
5998      false_value = TREE_OPERAND (cond, 2);
5999	      /* If this operand is an expression that throws, then it does not
6000		 make sense to try to perform a logical or arithmetic operation
6001		 involving it.  */
6002      if (VOID_TYPE_P (TREE_TYPE (true_value)))
6003	lhs = true_value;
6004      if (VOID_TYPE_P (TREE_TYPE (false_value)))
6005	rhs = false_value;
6006    }
6007  else
6008    {
6009      tree testtype = TREE_TYPE (cond);
6010      test = cond;
6011      true_value = constant_boolean_node (true, testtype);
6012      false_value = constant_boolean_node (false, testtype);
6013    }
6014
6015  arg = fold_convert (arg_type, arg);
6016  if (lhs == 0)
6017    {
6018      true_value = fold_convert (cond_type, true_value);
6019      if (cond_first_p)
6020	lhs = fold_build2 (code, type, true_value, arg);
6021      else
6022	lhs = fold_build2 (code, type, arg, true_value);
6023    }
6024  if (rhs == 0)
6025    {
6026      false_value = fold_convert (cond_type, false_value);
6027      if (cond_first_p)
6028	rhs = fold_build2 (code, type, false_value, arg);
6029      else
6030	rhs = fold_build2 (code, type, arg, false_value);
6031    }
6032
6033  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6034  return fold_convert (type, test);
6035}
6036
6037
6038/* Subroutine of fold() that checks for the addition of +/- 0.0.
6039
6040   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6041   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6042   ADDEND is the same as X.
6043
6044   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6045   and finite.  The problematic cases are when X is zero, and its mode
6046   has signed zeros.  In the case of rounding towards -infinity,
6047   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6048   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
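
/* Concretely (illustrative): in the default rounding mode,
   (-0.0) + 0.0 yields +0.0, so "x + 0.0" differs from "x" at x == -0.0;
   while (-0.0) - 0.0 yields -0.0, so "x - 0.0" is safe unless
   sign-dependent rounding must be honored.  */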
6049
6050static bool
6051fold_real_zero_addition_p (tree type, tree addend, int negate)
6052{
6053  if (!real_zerop (addend))
6054    return false;
6055
6056  /* Don't allow the fold with -fsignaling-nans.  */
6057  if (HONOR_SNANS (TYPE_MODE (type)))
6058    return false;
6059
6060  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6061  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6062    return true;
6063
6064  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6065  if (TREE_CODE (addend) == REAL_CST
6066      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6067    negate = !negate;
6068
6069  /* The mode has signed zeros, and we have to honor their sign.
6070     In this situation, there is only one case we can return true for.
6071     X - 0 is the same as X unless rounding towards -infinity is
6072     supported.  */
6073  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6074}
6075
6076/* Subroutine of fold() that checks comparisons of built-in math
6077   functions against real constants.
6078
6079   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6080   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
6081   is the type of the result and ARG0 and ARG1 are the operands of the
6082   comparison.  ARG1 must be a TREE_REAL_CST.
6083
6084   The function returns the constant folded tree if a simplification
6085   can be made, and NULL_TREE otherwise.  */
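
/* For example (illustrative): as sqrt is monotonic and never negative,

     sqrt (x) > 2.0    becomes   x > 4.0
     sqrt (x) < -1.0   becomes   false

   with extra care taken below when NaNs or infinities are honored.  */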
6086
6087static tree
6088fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6089		     tree type, tree arg0, tree arg1)
6090{
6091  REAL_VALUE_TYPE c;
6092
6093  if (BUILTIN_SQRT_P (fcode))
6094    {
6095      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
6096      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6097
6098      c = TREE_REAL_CST (arg1);
6099      if (REAL_VALUE_NEGATIVE (c))
6100	{
6101	  /* sqrt(x) < y (and likewise == y, <= y) is always false, if y is negative.  */
6102	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6103	    return omit_one_operand (type, integer_zero_node, arg);
6104
6105	  /* sqrt(x) > y is always true, if y is negative and we
6106	     don't care about NaNs, i.e. negative values of x.  */
6107	  if (code == NE_EXPR || !HONOR_NANS (mode))
6108	    return omit_one_operand (type, integer_one_node, arg);
6109
6110	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
6111	  return fold_build2 (GE_EXPR, type, arg,
6112			      build_real (TREE_TYPE (arg), dconst0));
6113	}
6114      else if (code == GT_EXPR || code == GE_EXPR)
6115	{
6116	  REAL_VALUE_TYPE c2;
6117
6118	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6119	  real_convert (&c2, mode, &c2);
6120
6121	  if (REAL_VALUE_ISINF (c2))
6122	    {
6123	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
6124	      if (HONOR_INFINITIES (mode))
6125		return fold_build2 (EQ_EXPR, type, arg,
6126				    build_real (TREE_TYPE (arg), c2));
6127
6128	      /* sqrt(x) > y is always false, when y is very large
6129		 and we don't care about infinities.  */
6130	      return omit_one_operand (type, integer_zero_node, arg);
6131	    }
6132
6133	  /* sqrt(x) > c is the same as x > c*c.  */
6134	  return fold_build2 (code, type, arg,
6135			      build_real (TREE_TYPE (arg), c2));
6136	}
6137      else if (code == LT_EXPR || code == LE_EXPR)
6138	{
6139	  REAL_VALUE_TYPE c2;
6140
6141	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6142	  real_convert (&c2, mode, &c2);
6143
6144	  if (REAL_VALUE_ISINF (c2))
6145	    {
6146	      /* sqrt(x) < y is always true, when y is a very large
6147		 value and we don't care about NaNs or Infinities.  */
6148	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6149		return omit_one_operand (type, integer_one_node, arg);
6150
6151	      /* sqrt(x) < y is x != +Inf when y is very large and we
6152		 don't care about NaNs.  */
6153	      if (! HONOR_NANS (mode))
6154		return fold_build2 (NE_EXPR, type, arg,
6155				    build_real (TREE_TYPE (arg), c2));
6156
6157	      /* sqrt(x) < y is x >= 0 when y is very large and we
6158		 don't care about Infinities.  */
6159	      if (! HONOR_INFINITIES (mode))
6160		return fold_build2 (GE_EXPR, type, arg,
6161				    build_real (TREE_TYPE (arg), dconst0));
6162
6163	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
6164	      if (lang_hooks.decls.global_bindings_p () != 0
6165		  || CONTAINS_PLACEHOLDER_P (arg))
6166		return NULL_TREE;
6167
6168	      arg = save_expr (arg);
6169	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
6170				  fold_build2 (GE_EXPR, type, arg,
6171					       build_real (TREE_TYPE (arg),
6172							   dconst0)),
6173				  fold_build2 (NE_EXPR, type, arg,
6174					       build_real (TREE_TYPE (arg),
6175							   c2)));
6176	    }
6177
6178	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
6179	  if (! HONOR_NANS (mode))
6180	    return fold_build2 (code, type, arg,
6181				build_real (TREE_TYPE (arg), c2));
6182
6183	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
6184	  if (lang_hooks.decls.global_bindings_p () == 0
6185	      && ! CONTAINS_PLACEHOLDER_P (arg))
6186	    {
6187	      arg = save_expr (arg);
6188	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
6189				  fold_build2 (GE_EXPR, type, arg,
6190					       build_real (TREE_TYPE (arg),
6191							   dconst0)),
6192				  fold_build2 (code, type, arg,
6193					       build_real (TREE_TYPE (arg),
6194							   c2)));
6195	    }
6196	}
6197    }
6198
6199  return NULL_TREE;
6200}
6201
6202/* Subroutine of fold() that optimizes comparisons against Infinities,
6203   either +Inf or -Inf.
6204
6205   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6206   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6207   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6208
6209   The function returns the constant folded tree if a simplification
6210   can be made, and NULL_TREE otherwise.  */
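
/* For example (illustrative, with type double):

     x <  +Inf   becomes   x <= DBL_MAX
     x >= +Inf   becomes   x >  DBL_MAX

   comparisons against -Inf are handled by first swapping the sense of
   the comparison.  */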
6211
6212static tree
6213fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6214{
6215  enum machine_mode mode;
6216  REAL_VALUE_TYPE max;
6217  tree temp;
6218  bool neg;
6219
6220  mode = TYPE_MODE (TREE_TYPE (arg0));
6221
6222  /* For negative infinity swap the sense of the comparison.  */
6223  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6224  if (neg)
6225    code = swap_tree_comparison (code);
6226
6227  switch (code)
6228    {
6229    case GT_EXPR:
6230	      /* x > +Inf is always false, if we ignore sNaNs.  */
6231      if (HONOR_SNANS (mode))
6232        return NULL_TREE;
6233      return omit_one_operand (type, integer_zero_node, arg0);
6234
6235    case LE_EXPR:
6236	      /* x <= +Inf is always true, if we don't care about NaNs.  */
6237      if (! HONOR_NANS (mode))
6238	return omit_one_operand (type, integer_one_node, arg0);
6239
6240      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
6241      if (lang_hooks.decls.global_bindings_p () == 0
6242	  && ! CONTAINS_PLACEHOLDER_P (arg0))
6243	{
6244	  arg0 = save_expr (arg0);
6245	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
6246	}
6247      break;
6248
6249    case EQ_EXPR:
6250    case GE_EXPR:
6251      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
6252      real_maxval (&max, neg, mode);
6253      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6254			  arg0, build_real (TREE_TYPE (arg0), max));
6255
6256    case LT_EXPR:
6257      /* x < +Inf is always equal to x <= DBL_MAX.  */
6258      real_maxval (&max, neg, mode);
6259      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6260			  arg0, build_real (TREE_TYPE (arg0), max));
6261
6262    case NE_EXPR:
6263      /* x != +Inf is always equal to !(x > DBL_MAX).  */
6264      real_maxval (&max, neg, mode);
6265      if (! HONOR_NANS (mode))
6266	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6267			    arg0, build_real (TREE_TYPE (arg0), max));
6268
6269      /* The transformation below creates non-gimple code and thus is
6270	 not appropriate if we are in gimple form.  */
6271      if (in_gimple_form)
6272	return NULL_TREE;
6273
6274      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6275			  arg0, build_real (TREE_TYPE (arg0), max));
6276      return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6277
6278    default:
6279      break;
6280    }
6281
6282  return NULL_TREE;
6283}
6284
6285/* Subroutine of fold() that optimizes comparisons of a division by
6286   a nonzero integer constant against an integer constant, i.e.
6287   X/C1 op C2.
6288
6289   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6290   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6291	   are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6292
6293   The function returns the constant folded tree if a simplification
6294   can be made, and NULL_TREE otherwise.  */
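
/* For example (illustrative, signed operands): because C division
   truncates, "x / 3 == 2" holds exactly for x in [6, 8], so it becomes
   the range check "6 <= x && x <= 8", which build_range_check typically
   emits as a single unsigned comparison; similarly "x / 3 > 2" becomes
   "x > 8".  */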
6295
6296static tree
6297fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6298{
6299  tree prod, tmp, hi, lo;
6300  tree arg00 = TREE_OPERAND (arg0, 0);
6301  tree arg01 = TREE_OPERAND (arg0, 1);
6302  unsigned HOST_WIDE_INT lpart;
6303  HOST_WIDE_INT hpart;
6304  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6305  bool neg_overflow;
6306  int overflow;
6307
6308  /* We have to do this the hard way to detect unsigned overflow.
6309     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
6310  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6311				   TREE_INT_CST_HIGH (arg01),
6312				   TREE_INT_CST_LOW (arg1),
6313				   TREE_INT_CST_HIGH (arg1),
6314				   &lpart, &hpart, unsigned_p);
6315  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6316  prod = force_fit_type (prod, -1, overflow, false);
6317  neg_overflow = false;
6318
6319  if (unsigned_p)
6320    {
6321      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6322      lo = prod;
6323
6324      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
6325      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6326				       TREE_INT_CST_HIGH (prod),
6327				       TREE_INT_CST_LOW (tmp),
6328				       TREE_INT_CST_HIGH (tmp),
6329				       &lpart, &hpart, unsigned_p);
6330      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6331      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6332			   TREE_CONSTANT_OVERFLOW (prod));
6333    }
6334  else if (tree_int_cst_sgn (arg01) >= 0)
6335    {
6336      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6337      switch (tree_int_cst_sgn (arg1))
6338	{
6339	case -1:
6340	  neg_overflow = true;
6341	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6342	  hi = prod;
6343	  break;
6344
6345	case  0:
6346	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6347	  hi = tmp;
6348	  break;
6349
6350	case  1:
6351          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6352	  lo = prod;
6353	  break;
6354
6355	default:
6356	  gcc_unreachable ();
6357	}
6358    }
6359  else
6360    {
6361      /* A negative divisor reverses the relational operators.  */
6362      code = swap_tree_comparison (code);
6363
6364      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6365      switch (tree_int_cst_sgn (arg1))
6366	{
6367	case -1:
6368	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6369	  lo = prod;
6370	  break;
6371
6372	case  0:
6373	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6374	  lo = tmp;
6375	  break;
6376
6377	case  1:
6378	  neg_overflow = true;
6379	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6380	  hi = prod;
6381	  break;
6382
6383	default:
6384	  gcc_unreachable ();
6385	}
6386    }
6387
6388  switch (code)
6389    {
6390    case EQ_EXPR:
6391      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6392	return omit_one_operand (type, integer_zero_node, arg00);
6393      if (TREE_OVERFLOW (hi))
6394	return fold_build2 (GE_EXPR, type, arg00, lo);
6395      if (TREE_OVERFLOW (lo))
6396	return fold_build2 (LE_EXPR, type, arg00, hi);
6397      return build_range_check (type, arg00, 1, lo, hi);
6398
6399    case NE_EXPR:
6400      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6401	return omit_one_operand (type, integer_one_node, arg00);
6402      if (TREE_OVERFLOW (hi))
6403	return fold_build2 (LT_EXPR, type, arg00, lo);
6404      if (TREE_OVERFLOW (lo))
6405	return fold_build2 (GT_EXPR, type, arg00, hi);
6406      return build_range_check (type, arg00, 0, lo, hi);
6407
6408    case LT_EXPR:
6409      if (TREE_OVERFLOW (lo))
6410	{
6411	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6412	  return omit_one_operand (type, tmp, arg00);
6413	}
6414      return fold_build2 (LT_EXPR, type, arg00, lo);
6415
6416    case LE_EXPR:
6417      if (TREE_OVERFLOW (hi))
6418	{
6419	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
6420	  return omit_one_operand (type, tmp, arg00);
6421	}
6422      return fold_build2 (LE_EXPR, type, arg00, hi);
6423
6424    case GT_EXPR:
6425      if (TREE_OVERFLOW (hi))
6426	{
6427	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6428	  return omit_one_operand (type, tmp, arg00);
6429	}
6430      return fold_build2 (GT_EXPR, type, arg00, hi);
6431
6432    case GE_EXPR:
6433      if (TREE_OVERFLOW (lo))
6434	{
6435	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
6436	  return omit_one_operand (type, tmp, arg00);
6437	}
6438      return fold_build2 (GE_EXPR, type, arg00, lo);
6439
6440    default:
6441      break;
6442    }
6443
6444  return NULL_TREE;
6445}
6446
6447
6448/* If CODE with arguments ARG0 and ARG1 represents a single bit
6449   equality/inequality test, then return a simplified form of the test
6450	   using a sign test.  Otherwise return NULL.  TYPE is the desired
6451   result type.  */
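
/* For example (illustrative, 32-bit int): "(x & 0x80000000) != 0"
   becomes "x < 0", and "(x & 0x80000000) == 0" becomes "x >= 0",
   once x is reinterpreted as signed.  */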
6452
6453static tree
6454fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6455				     tree result_type)
6456{
6457  /* If this is testing a single bit, we can optimize the test.  */
6458  if ((code == NE_EXPR || code == EQ_EXPR)
6459      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6460      && integer_pow2p (TREE_OPERAND (arg0, 1)))
6461    {
6462      /* If we have (A & C) != 0 where C is the sign bit of A, convert
6463	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6464      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6465
6466      if (arg00 != NULL_TREE
6467	  /* This is only a win if casting to a signed type is cheap,
6468	     i.e. when arg00's type is not a partial mode.  */
6469	  && TYPE_PRECISION (TREE_TYPE (arg00))
6470	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6471	{
6472	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6473	  return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6474			      result_type, fold_convert (stype, arg00),
6475			      build_int_cst (stype, 0));
6476	}
6477    }
6478
6479  return NULL_TREE;
6480}
6481
6482/* If CODE with arguments ARG0 and ARG1 represents a single bit
6483   equality/inequality test, then return a simplified form of
6484   the test using shifts and logical operations.  Otherwise return
6485   NULL.  TYPE is the desired result type.  */
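
/* For example (illustrative): "(x & 8) != 0" becomes "(x >> 3) & 1",
   and "(x & 8) == 0" becomes "((x >> 3) ^ 1) & 1", typically computed
   in an unsigned type so the shift cannot smear the sign bit.  */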
6486
6487tree
6488fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6489		      tree result_type)
6490{
6491  /* If this is testing a single bit, we can optimize the test.  */
6492  if ((code == NE_EXPR || code == EQ_EXPR)
6493      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6494      && integer_pow2p (TREE_OPERAND (arg0, 1)))
6495    {
6496      tree inner = TREE_OPERAND (arg0, 0);
6497      tree type = TREE_TYPE (arg0);
6498      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6499      enum machine_mode operand_mode = TYPE_MODE (type);
6500      int ops_unsigned;
6501      tree signed_type, unsigned_type, intermediate_type;
6502      tree tem;
6503
6504      /* First, see if we can fold the single bit test into a sign-bit
6505	 test.  */
6506      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6507						 result_type);
6508      if (tem)
6509	return tem;
6510
6511      /* Otherwise we have (A & C) != 0 where C is a single bit;
6512	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6513	 Similarly for (A & C) == 0.  */
6514
6515      /* If INNER is a right shift by a constant and that shift count
6516	 plus BITNUM does not overflow the precision, adjust BITNUM and INNER.  */
6517      if (TREE_CODE (inner) == RSHIFT_EXPR
6518	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6519	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6520	  && bitnum < TYPE_PRECISION (type)
6521	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6522				   bitnum - TYPE_PRECISION (type)))
6523	{
6524	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6525	  inner = TREE_OPERAND (inner, 0);
6526	}
6527
6528      /* If we are going to be able to omit the AND below, we must do our
6529	 operations as unsigned.  If we must use the AND, we have a choice.
6530	 Normally unsigned is faster, but on some machines signed is faster.  */
6531#ifdef LOAD_EXTEND_OP
6532      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6533		      && !flag_syntax_only) ? 0 : 1;
6534#else
6535      ops_unsigned = 1;
6536#endif
6537
6538      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6539      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6540      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6541      inner = fold_convert (intermediate_type, inner);
6542
6543      if (bitnum != 0)
6544	inner = build2 (RSHIFT_EXPR, intermediate_type,
6545			inner, size_int (bitnum));
6546
6547      if (code == EQ_EXPR)
6548	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6549			     inner, integer_one_node);
6550
6551      /* Put the AND last so it can combine with more things.  */
6552      inner = build2 (BIT_AND_EXPR, intermediate_type,
6553		      inner, integer_one_node);
6554
6555      /* Make sure to return the proper type.  */
6556      inner = fold_convert (result_type, inner);
6557
6558      return inner;
6559    }
6560  return NULL_TREE;
6561}
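/* Editorial illustration (not part of GCC): the shift-and-mask form
   produced above for a non-sign bit, here bit 3 of a hypothetical
   operand; guarded out of the build.  */
#if 0
static int
example_single_bit_fold (unsigned int a)
{
  int before = (a & 8) != 0;	/* C = 8, so C2 = log2 (8) = 3.  */
  int after = (a >> 3) & 1;	/* Folded form; == 0 adds a XOR with 1.  */
  return before == after;	/* Always 1.  */
}
#endif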
6562
6563/* Check whether we are allowed to reorder operands ARG0 and ARG1,
6564   so that ARG1 is evaluated before ARG0.  */
6565
6566static bool
6567reorder_operands_p (tree arg0, tree arg1)
6568{
6569  if (! flag_evaluation_order)
6570    return true;
6571  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6572    return true;
6573  return ! TREE_SIDE_EFFECTS (arg0)
6574	 && ! TREE_SIDE_EFFECTS (arg1);
6575}
6576
6577/* Test whether it is preferable to swap two operands, ARG0 and
6578   ARG1, for example because ARG0 is an integer constant and ARG1
6579   isn't.  If REORDER is true, only recommend swapping if we can
6580   evaluate the operands in reverse order.  */
6581
6582bool
6583tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6584{
6585  STRIP_SIGN_NOPS (arg0);
6586  STRIP_SIGN_NOPS (arg1);
6587
6588  if (TREE_CODE (arg1) == INTEGER_CST)
6589    return 0;
6590  if (TREE_CODE (arg0) == INTEGER_CST)
6591    return 1;
6592
6593  if (TREE_CODE (arg1) == REAL_CST)
6594    return 0;
6595  if (TREE_CODE (arg0) == REAL_CST)
6596    return 1;
6597
6598  if (TREE_CODE (arg1) == COMPLEX_CST)
6599    return 0;
6600  if (TREE_CODE (arg0) == COMPLEX_CST)
6601    return 1;
6602
6603  if (TREE_CONSTANT (arg1))
6604    return 0;
6605  if (TREE_CONSTANT (arg0))
6606    return 1;
6607
6608  if (optimize_size)
6609    return 0;
6610
6611  if (reorder && flag_evaluation_order
6612      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6613    return 0;
6614
6615  if (DECL_P (arg1))
6616    return 0;
6617  if (DECL_P (arg0))
6618    return 1;
6619
6620  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6621     for commutative and comparison operators.  Ensuring a canonical
6622     form allows the optimizers to find additional redundancies without
6623     having to explicitly check for both orderings.  */
6624  if (TREE_CODE (arg0) == SSA_NAME
6625      && TREE_CODE (arg1) == SSA_NAME
6626      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6627    return 1;
6628
6629  return 0;
6630}
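/* Editorial note: a worked example of the canonicalization driven by
   tree_swap_operands_p.  Constants sort last, so a caller such as
   fold_comparison rewrites 5 < x as x > 5, flipping the comparison
   code with swap_tree_comparison.  */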
6631
6632/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6633   ARG0 is extended to a wider type.  */
6634
6635static tree
6636fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6637{
6638  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6639  tree arg1_unw;
6640  tree shorter_type, outer_type;
6641  tree min, max;
6642  bool above, below;
6643
6644  if (arg0_unw == arg0)
6645    return NULL_TREE;
6646  shorter_type = TREE_TYPE (arg0_unw);
6647
6648#ifdef HAVE_canonicalize_funcptr_for_compare
6649  /* Disable this optimization if we're casting a function pointer
6650     type on targets that require function pointer canonicalization.  */
6651  if (HAVE_canonicalize_funcptr_for_compare
6652      && TREE_CODE (shorter_type) == POINTER_TYPE
6653      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6654    return NULL_TREE;
6655#endif
6656
6657  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6658    return NULL_TREE;
6659
6660  arg1_unw = get_unwidened (arg1, NULL_TREE);
6661
6662  /* If possible, express the comparison in the shorter mode.  */
6663  if ((code == EQ_EXPR || code == NE_EXPR
6664       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6665      && (TREE_TYPE (arg1_unw) == shorter_type
6666	  || (TYPE_PRECISION (shorter_type)
6667	      >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6668	  || (TREE_CODE (arg1_unw) == INTEGER_CST
6669	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
6670		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6671	      && int_fits_type_p (arg1_unw, shorter_type))))
6672    return fold_build2 (code, type, arg0_unw,
6673		       fold_convert (shorter_type, arg1_unw));
6674
6675  if (TREE_CODE (arg1_unw) != INTEGER_CST
6676      || TREE_CODE (shorter_type) != INTEGER_TYPE
6677      || !int_fits_type_p (arg1_unw, shorter_type))
6678    return NULL_TREE;
6679
6680  /* If we are comparing with an integer that does not fit into the range
6681     of the shorter type, the result is known.  */
6682  outer_type = TREE_TYPE (arg1_unw);
6683  min = lower_bound_in_type (outer_type, shorter_type);
6684  max = upper_bound_in_type (outer_type, shorter_type);
6685
6686  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6687						   max, arg1_unw));
6688  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6689						   arg1_unw, min));
6690
6691  switch (code)
6692    {
6693    case EQ_EXPR:
6694      if (above || below)
6695	return omit_one_operand (type, integer_zero_node, arg0);
6696      break;
6697
6698    case NE_EXPR:
6699      if (above || below)
6700	return omit_one_operand (type, integer_one_node, arg0);
6701      break;
6702
6703    case LT_EXPR:
6704    case LE_EXPR:
6705      if (above)
6706	return omit_one_operand (type, integer_one_node, arg0);
6707      else if (below)
6708	return omit_one_operand (type, integer_zero_node, arg0);
6709      break;
6710    case GT_EXPR:
6711    case GE_EXPR:
6712      if (above)
6713	return omit_one_operand (type, integer_zero_node, arg0);
6714      else if (below)
6715	return omit_one_operand (type, integer_one_node, arg0);
6716      break;
6717    default:
6718      break;
6719    }
6720
6721  return NULL_TREE;
6722}
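/* Editorial illustration (not part of GCC): the common case above is
   re-expressing the comparison in the narrower type.  With 8-bit
   signed char, hypothetically:

     signed char c;
     (int) c == 100   ->   c == (signed char) 100

   dropping the widening cast since 100 fits in signed char.  */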
6723
6724/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where ARG0
6725   is a conversion of its operand that changes only the signedness.  */
6726
6727static tree
6728fold_sign_changed_comparison (enum tree_code code, tree type,
6729			      tree arg0, tree arg1)
6730{
6731  tree arg0_inner, tmp;
6732  tree inner_type, outer_type;
6733
6734  if (TREE_CODE (arg0) != NOP_EXPR
6735      && TREE_CODE (arg0) != CONVERT_EXPR)
6736    return NULL_TREE;
6737
6738  outer_type = TREE_TYPE (arg0);
6739  arg0_inner = TREE_OPERAND (arg0, 0);
6740  inner_type = TREE_TYPE (arg0_inner);
6741
6742#ifdef HAVE_canonicalize_funcptr_for_compare
6743  /* Disable this optimization if we're casting a function pointer
6744     type on targets that require function pointer canonicalization.  */
6745  if (HAVE_canonicalize_funcptr_for_compare
6746      && TREE_CODE (inner_type) == POINTER_TYPE
6747      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6748    return NULL_TREE;
6749#endif
6750
6751  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6752    return NULL_TREE;
6753
6754  if (TREE_CODE (arg1) != INTEGER_CST
6755      && !((TREE_CODE (arg1) == NOP_EXPR
6756	    || TREE_CODE (arg1) == CONVERT_EXPR)
6757	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6758    return NULL_TREE;
6759
6760  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6761      && code != NE_EXPR
6762      && code != EQ_EXPR)
6763    return NULL_TREE;
6764
6765  if (TREE_CODE (arg1) == INTEGER_CST)
6766    {
6767      tmp = build_int_cst_wide (inner_type,
6768				TREE_INT_CST_LOW (arg1),
6769				TREE_INT_CST_HIGH (arg1));
6770      arg1 = force_fit_type (tmp, 0,
6771			     TREE_OVERFLOW (arg1),
6772			     TREE_CONSTANT_OVERFLOW (arg1));
6773    }
6774  else
6775    arg1 = fold_convert (inner_type, arg1);
6776
6777  return fold_build2 (code, type, arg0_inner, arg1);
6778}
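/* Editorial illustration (not part of GCC): equality is unaffected by
   signedness, so the conversion can be stripped.  With 32-bit int,
   hypothetically:

     unsigned int u;
     (int) u == 5   ->   u == 5U

   Ordered comparisons are rejected above when the signedness changes,
   since e.g. (int) u < 0 tests the sign bit while u < 0U is always
   false.  */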
6779
6780/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6781   the step of the array.  Reconstructs s and delta in the case of s * delta
6782   being an integer constant (and thus already folded).
6783   ADDR is the address.  OP1 is the multiplicative expression s * delta.
6784   If the function succeeds, the new address expression is returned.
6785   Otherwise NULL_TREE is returned.  */
6786
6787static tree
6788try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6789{
6790  tree s, delta, step;
6791  tree ref = TREE_OPERAND (addr, 0), pref;
6792  tree ret, pos;
6793  tree itype;
6794
6795  /* Canonicalize op1 into a possibly non-constant delta
6796     and an INTEGER_CST s.  */
6797  if (TREE_CODE (op1) == MULT_EXPR)
6798    {
6799      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6800
6801      STRIP_NOPS (arg0);
6802      STRIP_NOPS (arg1);
6803
6804      if (TREE_CODE (arg0) == INTEGER_CST)
6805        {
6806          s = arg0;
6807          delta = arg1;
6808        }
6809      else if (TREE_CODE (arg1) == INTEGER_CST)
6810        {
6811          s = arg1;
6812          delta = arg0;
6813        }
6814      else
6815        return NULL_TREE;
6816    }
6817  else if (TREE_CODE (op1) == INTEGER_CST)
6818    {
6819      delta = op1;
6820      s = NULL_TREE;
6821    }
6822  else
6823    {
6824      /* Treat op1 as delta * 1.  */
6825      delta = op1;
6826      s = integer_one_node;
6827    }
6828
6829  for (;; ref = TREE_OPERAND (ref, 0))
6830    {
6831      if (TREE_CODE (ref) == ARRAY_REF)
6832	{
6833	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6834	  if (! itype)
6835	    continue;
6836
6837	  step = array_ref_element_size (ref);
6838	  if (TREE_CODE (step) != INTEGER_CST)
6839	    continue;
6840
6841	  if (s)
6842	    {
6843	      if (! tree_int_cst_equal (step, s))
6844                continue;
6845	    }
6846	  else
6847	    {
6848	      /* Check whether delta is a multiple of step.  */
6849	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6850	      if (! tmp)
6851		continue;
6852	      delta = tmp;
6853	    }
6854
6855	  break;
6856	}
6857
6858      if (!handled_component_p (ref))
6859	return NULL_TREE;
6860    }
6861
6862  /* We found a suitable array reference.  Copy everything up to it,
6863     and replace the index.  */
6864
6865  pref = TREE_OPERAND (addr, 0);
6866  ret = copy_node (pref);
6867  pos = ret;
6868
6869  while (pref != ref)
6870    {
6871      pref = TREE_OPERAND (pref, 0);
6872      TREE_OPERAND (pos, 0) = copy_node (pref);
6873      pos = TREE_OPERAND (pos, 0);
6874    }
6875
6876  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6877				       fold_convert (itype,
6878						     TREE_OPERAND (pos, 1)),
6879				       fold_convert (itype, delta));
6880
6881  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6882}
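/* Editorial illustration (not part of GCC): with a hypothetical
   4-byte element type,

     &a[i] + d * 4   ->   &a[i + d]

   and when the multiplication was already folded, as in &a[i] + 8,
   dividing by the step reconstructs delta = 2, giving &a[i + 2].  */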
6883
6884
6885/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6886   means A >= Y && A != MAX, but in this case we know that
6887   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
6888
6889static tree
6890fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6891{
6892  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6893
6894  if (TREE_CODE (bound) == LT_EXPR)
6895    a = TREE_OPERAND (bound, 0);
6896  else if (TREE_CODE (bound) == GT_EXPR)
6897    a = TREE_OPERAND (bound, 1);
6898  else
6899    return NULL_TREE;
6900
6901  typea = TREE_TYPE (a);
6902  if (!INTEGRAL_TYPE_P (typea)
6903      && !POINTER_TYPE_P (typea))
6904    return NULL_TREE;
6905
6906  if (TREE_CODE (ineq) == LT_EXPR)
6907    {
6908      a1 = TREE_OPERAND (ineq, 1);
6909      y = TREE_OPERAND (ineq, 0);
6910    }
6911  else if (TREE_CODE (ineq) == GT_EXPR)
6912    {
6913      a1 = TREE_OPERAND (ineq, 0);
6914      y = TREE_OPERAND (ineq, 1);
6915    }
6916  else
6917    return NULL_TREE;
6918
6919  if (TREE_TYPE (a1) != typea)
6920    return NULL_TREE;
6921
6922  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6923  if (!integer_onep (diff))
6924    return NULL_TREE;
6925
6926  return fold_build2 (GE_EXPR, type, a, y);
6927}
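/* Editorial note: a worked instance of the fold above.  Given the
   bound a < x we know a + 1 cannot wrap, so

     a + 1 > y && a < x   ->   a >= y && a < x

   The diff == 1 test is what recognizes the A + 1 form.  */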
6928
6929/* Fold a sum or difference involving at least one multiplication.
6930   Returns the folded tree or NULL_TREE if no simplification could be made.  */
6931
6932static tree
6933fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6934{
6935  tree arg00, arg01, arg10, arg11;
6936  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6937
6938  /* (A * C) +- (B * C) -> (A+-B) * C.
6939     (A * C) +- A -> A * (C+-1).
6940     We are most concerned about the case where C is a constant,
6941     but other combinations show up during loop reduction.  Since
6942     it is not difficult, try all four possibilities.  */
6943
6944  if (TREE_CODE (arg0) == MULT_EXPR)
6945    {
6946      arg00 = TREE_OPERAND (arg0, 0);
6947      arg01 = TREE_OPERAND (arg0, 1);
6948    }
6949  else
6950    {
6951      arg00 = arg0;
6952      arg01 = build_one_cst (type);
6953    }
6954  if (TREE_CODE (arg1) == MULT_EXPR)
6955    {
6956      arg10 = TREE_OPERAND (arg1, 0);
6957      arg11 = TREE_OPERAND (arg1, 1);
6958    }
6959  else
6960    {
6961      arg10 = arg1;
6962      arg11 = build_one_cst (type);
6963    }
6964  same = NULL_TREE;
6965
6966  if (operand_equal_p (arg01, arg11, 0))
6967    same = arg01, alt0 = arg00, alt1 = arg10;
6968  else if (operand_equal_p (arg00, arg10, 0))
6969    same = arg00, alt0 = arg01, alt1 = arg11;
6970  else if (operand_equal_p (arg00, arg11, 0))
6971    same = arg00, alt0 = arg01, alt1 = arg10;
6972  else if (operand_equal_p (arg01, arg10, 0))
6973    same = arg01, alt0 = arg00, alt1 = arg11;
6974
6975  /* No identical multiplicands; see if we can find a common
6976     power-of-two factor in non-power-of-two multiplies.  This
6977     can help in multi-dimensional array access.  */
6978  else if (host_integerp (arg01, 0)
6979	   && host_integerp (arg11, 0))
6980    {
6981      HOST_WIDE_INT int01, int11, tmp;
6982      bool swap = false;
6983      tree maybe_same;
6984      int01 = TREE_INT_CST_LOW (arg01);
6985      int11 = TREE_INT_CST_LOW (arg11);
6986
6987      /* Move min of absolute values to int11.  */
6988      if ((int01 >= 0 ? int01 : -int01)
6989	  < (int11 >= 0 ? int11 : -int11))
6990        {
6991	  tmp = int01, int01 = int11, int11 = tmp;
6992	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
6993	  maybe_same = arg01;
6994	  swap = true;
6995	}
6996      else
6997	maybe_same = arg11;
6998
6999      if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7000        {
7001	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7002			      build_int_cst (TREE_TYPE (arg00),
7003					     int01 / int11));
7004	  alt1 = arg10;
7005	  same = maybe_same;
7006	  if (swap)
7007	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7008	}
7009    }
7010
7011  if (same)
7012    return fold_build2 (MULT_EXPR, type,
7013			fold_build2 (code, type,
7014				     fold_convert (type, alt0),
7015				     fold_convert (type, alt1)),
7016			fold_convert (type, same));
7017
7018  return NULL_TREE;
7019}
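/* Editorial illustration (not part of GCC): both patterns handled
   above, on hypothetical operands:

     x * 12 + y * 12   ->   (x + y) * 12      (identical multiplicands)
     i * 12 + j * 4    ->   (i * 3 + j) * 4   (common power-of-2 factor)

   The second pattern arises in multi-dimensional array addressing.  */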
7020
7021/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7022   specified by EXPR into the buffer PTR of length LEN bytes.
7023   Return the number of bytes placed in the buffer, or zero
7024   upon failure.  */
7025
7026static int
7027native_encode_int (tree expr, unsigned char *ptr, int len)
7028{
7029  tree type = TREE_TYPE (expr);
7030  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7031  int byte, offset, word, words;
7032  unsigned char value;
7033
7034  if (total_bytes > len)
7035    return 0;
7036  words = total_bytes / UNITS_PER_WORD;
7037
7038  for (byte = 0; byte < total_bytes; byte++)
7039    {
7040      int bitpos = byte * BITS_PER_UNIT;
7041      if (bitpos < HOST_BITS_PER_WIDE_INT)
7042	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7043      else
7044	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7045				 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7046
7047      if (total_bytes > UNITS_PER_WORD)
7048	{
7049	  word = byte / UNITS_PER_WORD;
7050	  if (WORDS_BIG_ENDIAN)
7051	    word = (words - 1) - word;
7052	  offset = word * UNITS_PER_WORD;
7053	  if (BYTES_BIG_ENDIAN)
7054	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7055	  else
7056	    offset += byte % UNITS_PER_WORD;
7057	}
7058      else
7059	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7060      ptr[offset] = value;
7061    }
7062  return total_bytes;
7063}
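/* Editorial illustration (not part of GCC): on a little-endian target
   with 8-bit bytes, encoding the 32-bit INTEGER_CST 0x01020304 yields

     ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02, ptr[3] = 0x01

   while a big-endian target stores the bytes in the opposite order,
   as selected by the endianness logic above.  */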
7064
7065
7066/* Subroutine of native_encode_expr.  Encode the REAL_CST
7067   specified by EXPR into the buffer PTR of length LEN bytes.
7068   Return the number of bytes placed in the buffer, or zero
7069   upon failure.  */
7070
7071static int
7072native_encode_real (tree expr, unsigned char *ptr, int len)
7073{
7074  tree type = TREE_TYPE (expr);
7075  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7076  int byte, offset, word, words, bitpos;
7077  unsigned char value;
7078
7079  /* There are always 32 bits in each long, no matter the size of
7080     the host's long.  We handle floating point representations with
7081     up to 192 bits.  */
7082  long tmp[6];
7083
7084  if (total_bytes > len)
7085    return 0;
7086  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7087
7088  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7089
7090  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7091       bitpos += BITS_PER_UNIT)
7092    {
7093      byte = (bitpos / BITS_PER_UNIT) & 3;
7094      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7095
7096      if (UNITS_PER_WORD < 4)
7097	{
7098	  word = byte / UNITS_PER_WORD;
7099	  if (WORDS_BIG_ENDIAN)
7100	    word = (words - 1) - word;
7101	  offset = word * UNITS_PER_WORD;
7102	  if (BYTES_BIG_ENDIAN)
7103	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7104	  else
7105	    offset += byte % UNITS_PER_WORD;
7106	}
7107      else
7108	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7109      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7110    }
7111  return total_bytes;
7112}
7113
7114/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7115   specified by EXPR into the buffer PTR of length LEN bytes.
7116   Return the number of bytes placed in the buffer, or zero
7117   upon failure.  */
7118
7119static int
7120native_encode_complex (tree expr, unsigned char *ptr, int len)
7121{
7122  int rsize, isize;
7123  tree part;
7124
7125  part = TREE_REALPART (expr);
7126  rsize = native_encode_expr (part, ptr, len);
7127  if (rsize == 0)
7128    return 0;
7129  part = TREE_IMAGPART (expr);
7130  isize = native_encode_expr (part, ptr+rsize, len-rsize);
7131  if (isize != rsize)
7132    return 0;
7133  return rsize + isize;
7134}
7135
7136
7137/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7138   specified by EXPR into the buffer PTR of length LEN bytes.
7139   Return the number of bytes placed in the buffer, or zero
7140   upon failure.  */
7141
7142static int
7143native_encode_vector (tree expr, unsigned char *ptr, int len)
7144{
7145  int i, size, offset, count;
7146  tree itype, elem, elements;
7147
7148  offset = 0;
7149  elements = TREE_VECTOR_CST_ELTS (expr);
7150  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7151  itype = TREE_TYPE (TREE_TYPE (expr));
7152  size = GET_MODE_SIZE (TYPE_MODE (itype));
7153  for (i = 0; i < count; i++)
7154    {
7155      if (elements)
7156	{
7157	  elem = TREE_VALUE (elements);
7158	  elements = TREE_CHAIN (elements);
7159	}
7160      else
7161	elem = NULL_TREE;
7162
7163      if (elem)
7164	{
7165	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7166	    return 0;
7167	}
7168      else
7169	{
7170	  if (offset + size > len)
7171	    return 0;
7172	  memset (ptr+offset, 0, size);
7173	}
7174      offset += size;
7175    }
7176  return offset;
7177}
7178
7179
7180/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7181   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7182   buffer PTR of length LEN bytes.  Return the number of bytes
7183   placed in the buffer, or zero upon failure.  */
7184
7185static int
7186native_encode_expr (tree expr, unsigned char *ptr, int len)
7187{
7188  switch (TREE_CODE (expr))
7189    {
7190    case INTEGER_CST:
7191      return native_encode_int (expr, ptr, len);
7192
7193    case REAL_CST:
7194      return native_encode_real (expr, ptr, len);
7195
7196    case COMPLEX_CST:
7197      return native_encode_complex (expr, ptr, len);
7198
7199    case VECTOR_CST:
7200      return native_encode_vector (expr, ptr, len);
7201
7202    default:
7203      return 0;
7204    }
7205}
7206
7207
7208/* Subroutine of native_interpret_expr.  Interpret the contents of
7209   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7210   If the buffer cannot be interpreted, return NULL_TREE.  */
7211
7212static tree
7213native_interpret_int (tree type, unsigned char *ptr, int len)
7214{
7215  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7216  int byte, offset, word, words;
7217  unsigned char value;
7218  unsigned HOST_WIDE_INT lo = 0;
7219  HOST_WIDE_INT hi = 0;
7220
7221  if (total_bytes > len)
7222    return NULL_TREE;
7223  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7224    return NULL_TREE;
7225  words = total_bytes / UNITS_PER_WORD;
7226
7227  for (byte = 0; byte < total_bytes; byte++)
7228    {
7229      int bitpos = byte * BITS_PER_UNIT;
7230      if (total_bytes > UNITS_PER_WORD)
7231	{
7232	  word = byte / UNITS_PER_WORD;
7233	  if (WORDS_BIG_ENDIAN)
7234	    word = (words - 1) - word;
7235	  offset = word * UNITS_PER_WORD;
7236	  if (BYTES_BIG_ENDIAN)
7237	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7238	  else
7239	    offset += byte % UNITS_PER_WORD;
7240	}
7241      else
7242	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7243      value = ptr[offset];
7244
7245      if (bitpos < HOST_BITS_PER_WIDE_INT)
7246	lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7247      else
7248	hi |= (unsigned HOST_WIDE_INT) value
7249	      << (bitpos - HOST_BITS_PER_WIDE_INT);
7250    }
7251
7252  return force_fit_type (build_int_cst_wide (type, lo, hi),
7253			 0, false, false);
7254}
7255
7256
7257/* Subroutine of native_interpret_expr.  Interpret the contents of
7258   the buffer PTR of length LEN as a REAL_CST of type TYPE.
7259   If the buffer cannot be interpreted, return NULL_TREE.  */
7260
7261static tree
7262native_interpret_real (tree type, unsigned char *ptr, int len)
7263{
7264  enum machine_mode mode = TYPE_MODE (type);
7265  int total_bytes = GET_MODE_SIZE (mode);
7266  int byte, offset, word, words, bitpos;
7267  unsigned char value;
7268  /* There are always 32 bits in each long, no matter the size of
7269     the host's long.  We handle floating point representations with
7270     up to 192 bits.  */
7271  REAL_VALUE_TYPE r;
7272  long tmp[6];
7273
7274  total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7275  if (total_bytes > len || total_bytes > 24)
7276    return NULL_TREE;
7277  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7278
7279  memset (tmp, 0, sizeof (tmp));
7280  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7281       bitpos += BITS_PER_UNIT)
7282    {
7283      byte = (bitpos / BITS_PER_UNIT) & 3;
7284      if (UNITS_PER_WORD < 4)
7285	{
7286	  word = byte / UNITS_PER_WORD;
7287	  if (WORDS_BIG_ENDIAN)
7288	    word = (words - 1) - word;
7289	  offset = word * UNITS_PER_WORD;
7290	  if (BYTES_BIG_ENDIAN)
7291	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7292	  else
7293	    offset += byte % UNITS_PER_WORD;
7294	}
7295      else
7296	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7297      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7298
7299      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7300    }
7301
7302  real_from_target (&r, tmp, mode);
7303  return build_real (type, r);
7304}
7305
7306
7307/* Subroutine of native_interpret_expr.  Interpret the contents of
7308   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7309   If the buffer cannot be interpreted, return NULL_TREE.  */
7310
7311static tree
7312native_interpret_complex (tree type, unsigned char *ptr, int len)
7313{
7314  tree etype, rpart, ipart;
7315  int size;
7316
7317  etype = TREE_TYPE (type);
7318  size = GET_MODE_SIZE (TYPE_MODE (etype));
7319  if (size * 2 > len)
7320    return NULL_TREE;
7321  rpart = native_interpret_expr (etype, ptr, size);
7322  if (!rpart)
7323    return NULL_TREE;
7324  ipart = native_interpret_expr (etype, ptr+size, size);
7325  if (!ipart)
7326    return NULL_TREE;
7327  return build_complex (type, rpart, ipart);
7328}
7329
7330
7331/* Subroutine of native_interpret_expr.  Interpret the contents of
7332   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7333   If the buffer cannot be interpreted, return NULL_TREE.  */
7334
7335static tree
7336native_interpret_vector (tree type, unsigned char *ptr, int len)
7337{
7338  tree etype, elem, elements;
7339  int i, size, count;
7340
7341  etype = TREE_TYPE (type);
7342  size = GET_MODE_SIZE (TYPE_MODE (etype));
7343  count = TYPE_VECTOR_SUBPARTS (type);
7344  if (size * count > len)
7345    return NULL_TREE;
7346
7347  elements = NULL_TREE;
7348  for (i = count - 1; i >= 0; i--)
7349    {
7350      elem = native_interpret_expr (etype, ptr+(i*size), size);
7351      if (!elem)
7352	return NULL_TREE;
7353      elements = tree_cons (NULL_TREE, elem, elements);
7354    }
7355  return build_vector (type, elements);
7356}
7357
7358
7359/* Subroutine of fold_view_convert_expr.  Interpret the contents of
7360   the buffer PTR of length LEN as a constant of type TYPE.  For
7361   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7362   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7363   return NULL_TREE.  */
7364
7365static tree
7366native_interpret_expr (tree type, unsigned char *ptr, int len)
7367{
7368  switch (TREE_CODE (type))
7369    {
7370    case INTEGER_TYPE:
7371    case ENUMERAL_TYPE:
7372    case BOOLEAN_TYPE:
7373      return native_interpret_int (type, ptr, len);
7374
7375    case REAL_TYPE:
7376      return native_interpret_real (type, ptr, len);
7377
7378    case COMPLEX_TYPE:
7379      return native_interpret_complex (type, ptr, len);
7380
7381    case VECTOR_TYPE:
7382      return native_interpret_vector (type, ptr, len);
7383
7384    default:
7385      return NULL_TREE;
7386    }
7387}
7388
7389
7390/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7391   TYPE at compile-time.  If we're unable to perform the conversion
7392   return NULL_TREE.  */
7393
7394static tree
7395fold_view_convert_expr (tree type, tree expr)
7396{
7397  /* We support up to 512-bit values (for V8DFmode).  */
7398  unsigned char buffer[64];
7399  int len;
7400
7401  /* Check that the host and target are sane.  */
7402  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7403    return NULL_TREE;
7404
7405  len = native_encode_expr (expr, buffer, sizeof (buffer));
7406  if (len == 0)
7407    return NULL_TREE;
7408
7409  return native_interpret_expr (type, buffer, len);
7410}
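/* Editorial sketch (not part of GCC): the source-level analogue of
   fold_view_convert_expr, assuming IEEE single precision and 32-bit
   int; guarded out of the build.  */
#if 0
static int
example_view_convert (void)
{
  union { float f; int i; } u;
  u.f = 1.0f;
  return u.i;	/* VIEW_CONVERT_EXPR<int>(1.0f) == 0x3f800000.  */
}
#endif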
7411
7412
7413/* Fold a unary expression of code CODE and type TYPE with operand
7414   OP0.  Return the folded expression if folding is successful.
7415   Otherwise, return NULL_TREE.  */
7416
7417tree
7418fold_unary (enum tree_code code, tree type, tree op0)
7419{
7420  tree tem;
7421  tree arg0;
7422  enum tree_code_class kind = TREE_CODE_CLASS (code);
7423
7424  gcc_assert (IS_EXPR_CODE_CLASS (kind)
7425	      && TREE_CODE_LENGTH (code) == 1);
7426
7427  arg0 = op0;
7428  if (arg0)
7429    {
7430      if (code == NOP_EXPR || code == CONVERT_EXPR
7431	  || code == FLOAT_EXPR || code == ABS_EXPR)
7432	{
7433	  /* Don't use STRIP_NOPS, because signedness of argument type
7434	     matters.  */
7435	  STRIP_SIGN_NOPS (arg0);
7436	}
7437      else
7438	{
7439	  /* Strip any conversions that don't change the mode.  This
7440	     is safe for every expression, except for a comparison
7441	     expression because its signedness is derived from its
7442	     operands.
7443
7444	     Note that this is done as an internal manipulation within
7445	     the constant folder, in order to find the simplest
7446	     representation of the arguments so that their form can be
7447	     studied.  In any case, the appropriate type conversions
7448	     should be put back in the tree that will come out of the
7449	     constant folder.  */
7450	  STRIP_NOPS (arg0);
7451	}
7452    }
7453
7454  if (TREE_CODE_CLASS (code) == tcc_unary)
7455    {
7456      if (TREE_CODE (arg0) == COMPOUND_EXPR)
7457	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7458		       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7459      else if (TREE_CODE (arg0) == COND_EXPR)
7460	{
7461	  tree arg01 = TREE_OPERAND (arg0, 1);
7462	  tree arg02 = TREE_OPERAND (arg0, 2);
7463	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7464	    arg01 = fold_build1 (code, type, arg01);
7465	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7466	    arg02 = fold_build1 (code, type, arg02);
7467	  tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7468			     arg01, arg02);
7469
7470	  /* If this was a conversion, and all we did was to move it
7471	     inside the COND_EXPR, bring it back out.  But leave it if
7472	     it is a conversion from integer to integer and the
7473	     result precision is no wider than a word since such a
7474	     conversion is cheap and may be optimized away by combine,
7475	     while it couldn't if it were outside the COND_EXPR.  Then return
7476	     so we don't get into an infinite recursion loop taking the
7477	     conversion out and then back in.  */
7478
7479	  if ((code == NOP_EXPR || code == CONVERT_EXPR
7480	       || code == NON_LVALUE_EXPR)
7481	      && TREE_CODE (tem) == COND_EXPR
7482	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7483	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7484	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7485	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7486	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7487		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7488	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7489		     && (INTEGRAL_TYPE_P
7490			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7491		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7492		  || flag_syntax_only))
7493	    tem = build1 (code, type,
7494			  build3 (COND_EXPR,
7495				  TREE_TYPE (TREE_OPERAND
7496					     (TREE_OPERAND (tem, 1), 0)),
7497				  TREE_OPERAND (tem, 0),
7498				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7499				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7500	  return tem;
7501	}
7502      else if (COMPARISON_CLASS_P (arg0))
7503	{
7504	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7505	    {
7506	      arg0 = copy_node (arg0);
7507	      TREE_TYPE (arg0) = type;
7508	      return arg0;
7509	    }
7510	  else if (TREE_CODE (type) != INTEGER_TYPE)
7511	    return fold_build3 (COND_EXPR, type, arg0,
7512				fold_build1 (code, type,
7513					     integer_one_node),
7514				fold_build1 (code, type,
7515					     integer_zero_node));
7516	}
7517   }
7518
7519  switch (code)
7520    {
7521    case NOP_EXPR:
7522    case FLOAT_EXPR:
7523    case CONVERT_EXPR:
7524    case FIX_TRUNC_EXPR:
7525    case FIX_CEIL_EXPR:
7526    case FIX_FLOOR_EXPR:
7527    case FIX_ROUND_EXPR:
7528      if (TREE_TYPE (op0) == type)
7529	return op0;
7530
7531      /* If we have (type) (a CMP b) and type is an integral type, return
7532         a new expression involving the new type.  */
7533      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7534	return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7535			    TREE_OPERAND (op0, 1));
7536
7537      /* Handle cases of two conversions in a row.  */
7538      if (TREE_CODE (op0) == NOP_EXPR
7539	  || TREE_CODE (op0) == CONVERT_EXPR)
7540	{
7541	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7542	  tree inter_type = TREE_TYPE (op0);
7543	  int inside_int = INTEGRAL_TYPE_P (inside_type);
7544	  int inside_ptr = POINTER_TYPE_P (inside_type);
7545	  int inside_float = FLOAT_TYPE_P (inside_type);
7546	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7547	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
7548	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7549	  int inter_int = INTEGRAL_TYPE_P (inter_type);
7550	  int inter_ptr = POINTER_TYPE_P (inter_type);
7551	  int inter_float = FLOAT_TYPE_P (inter_type);
7552	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7553	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
7554	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7555	  int final_int = INTEGRAL_TYPE_P (type);
7556	  int final_ptr = POINTER_TYPE_P (type);
7557	  int final_float = FLOAT_TYPE_P (type);
7558	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7559	  unsigned int final_prec = TYPE_PRECISION (type);
7560	  int final_unsignedp = TYPE_UNSIGNED (type);
7561
7562	  /* In addition to the cases of two conversions in a row
7563	     handled below, if we are converting something to its own
7564	     type via an object of identical or wider precision, neither
7565	     conversion is needed.  */
7566	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7567	      && (((inter_int || inter_ptr) && final_int)
7568		  || (inter_float && final_float))
7569	      && inter_prec >= final_prec)
7570	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7571
7572	  /* Likewise, if the intermediate and final types are either both
7573	     float or both integer, we don't need the middle conversion if
7574	     it is wider than the final type and doesn't change the signedness
7575	     (for integers).  Avoid this if the final type is a pointer
7576	     since then we sometimes need the inner conversion.  Likewise if
7577	     the outer has a precision not equal to the size of its mode.  */
7578	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7579	       || (inter_float && inside_float)
7580	       || (inter_vec && inside_vec))
7581	      && inter_prec >= inside_prec
7582	      && (inter_float || inter_vec
7583		  || inter_unsignedp == inside_unsignedp)
7584	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7585		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
7586	      && ! final_ptr
7587	      && (! final_vec || inter_prec == inside_prec))
7588	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7589
7590	  /* If we have a sign-extension of a zero-extended value, we can
7591	     replace that by a single zero-extension.  */
7592	  if (inside_int && inter_int && final_int
7593	      && inside_prec < inter_prec && inter_prec < final_prec
7594	      && inside_unsignedp && !inter_unsignedp)
7595	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7596
7597	  /* Two conversions in a row are not needed unless:
7598	     - some conversion is floating-point (overstrict for now), or
7599	     - some conversion is a vector (overstrict for now), or
7600	     - the intermediate type is narrower than both initial and
7601	       final, or
7602	     - the intermediate type and innermost type differ in signedness,
7603	       and the outermost type is wider than the intermediate, or
7604	     - the initial type is a pointer type and the precisions of the
7605	       intermediate and final types differ, or
7606	     - the final type is a pointer type and the precisions of the
7607	       initial and intermediate types differ, or
7608	     - the final type is a pointer type and the initial type is not, or
7609	     - the initial type is a pointer to an array and the final type
7610	       is not.  */
7611	  /* Java pointer type conversions generate checks in some
7612	     cases, so we explicitly disallow this optimization.  */
7613	  if (! inside_float && ! inter_float && ! final_float
7614	      && ! inside_vec && ! inter_vec && ! final_vec
7615	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
7616	      && ! (inside_int && inter_int
7617		    && inter_unsignedp != inside_unsignedp
7618		    && inter_prec < final_prec)
7619	      && ((inter_unsignedp && inter_prec > inside_prec)
7620		  == (final_unsignedp && final_prec > inter_prec))
7621	      && ! (inside_ptr && inter_prec != final_prec)
7622	      && ! (final_ptr && inside_prec != inter_prec)
7623	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7624		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
7625	      && final_ptr == inside_ptr
7626	      && ! (inside_ptr
7627		    && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7628		    && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE)
7629	      && ! ((strcmp (lang_hooks.name, "GNU Java") == 0)
7630		    && final_ptr))
7631	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7632	}
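/* Editorial illustration (not part of GCC): instances of the rules
   above, assuming 8-bit char, 16-bit short and 32-bit int:

     unsigned char c;
     int x;
     (int) (short) c   ->   (int) c    (a sign-extension of a
                                        zero-extended value)
     (int) (short) x   is kept, since the truncation to short
                       matters.  */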
7633
7634      /* Handle (T *)&A.B.C for A being of type T and B and C
7635	 living at offset zero.  This occurs frequently in
7636	 C++ upcasting and then accessing the base.  */
7637      if (TREE_CODE (op0) == ADDR_EXPR
7638	  && POINTER_TYPE_P (type)
7639	  && handled_component_p (TREE_OPERAND (op0, 0)))
7640        {
7641	  HOST_WIDE_INT bitsize, bitpos;
7642	  tree offset;
7643	  enum machine_mode mode;
7644	  int unsignedp, volatilep;
7645          tree base = TREE_OPERAND (op0, 0);
7646	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7647				      &mode, &unsignedp, &volatilep, false);
7648	  /* If the reference was to a (constant) zero offset, we can use
7649	     the address of the base if it has the same base type
7650	     as the result type.  */
7651	  if (! offset && bitpos == 0
7652	      && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7653		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7654	    return fold_convert (type, build_fold_addr_expr (base));
7655        }
7656
7657      if (TREE_CODE (op0) == MODIFY_EXPR
7658	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7659	  /* Detect assigning a bitfield.  */
7660	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7661	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7662	{
7663	  /* Don't leave an assignment inside a conversion
7664	     unless assigning a bitfield.  */
7665	  tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7666	  /* First do the assignment, then return converted constant.  */
7667	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7668	  TREE_NO_WARNING (tem) = 1;
7669	  TREE_USED (tem) = 1;
7670	  return tem;
7671	}
7672
7673      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7674	 constant (if x has signed type, the sign bit cannot be set
7675	 in c).  This folds extension into the BIT_AND_EXPR.  */
7676      if (INTEGRAL_TYPE_P (type)
7677	  && TREE_CODE (type) != BOOLEAN_TYPE
7678	  && TREE_CODE (op0) == BIT_AND_EXPR
7679	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7680	{
7681	  tree and = op0;
7682	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7683	  int change = 0;
7684
7685	  if (TYPE_UNSIGNED (TREE_TYPE (and))
7686	      || (TYPE_PRECISION (type)
7687		  <= TYPE_PRECISION (TREE_TYPE (and))))
7688	    change = 1;
7689	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7690		   <= HOST_BITS_PER_WIDE_INT
7691		   && host_integerp (and1, 1))
7692	    {
7693	      unsigned HOST_WIDE_INT cst;
7694
7695	      cst = tree_low_cst (and1, 1);
7696	      cst &= (HOST_WIDE_INT) -1
7697		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7698	      change = (cst == 0);
7699#ifdef LOAD_EXTEND_OP
7700	      if (change
7701		  && !flag_syntax_only
7702		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7703		      == ZERO_EXTEND))
7704		{
7705		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7706		  and0 = fold_convert (uns, and0);
7707		  and1 = fold_convert (uns, and1);
7708		}
7709#endif
7710	    }
7711	  if (change)
7712	    {
7713	      tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7714					TREE_INT_CST_HIGH (and1));
7715	      tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7716				    TREE_CONSTANT_OVERFLOW (and1));
7717	      return fold_build2 (BIT_AND_EXPR, type,
7718				  fold_convert (type, and0), tem);
7719	    }
7720	}
7721
7722      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7723	 T2 being pointers to types of the same size.  */
7724      if (POINTER_TYPE_P (type)
7725	  && BINARY_CLASS_P (arg0)
7726	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7727	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7728	{
7729	  tree arg00 = TREE_OPERAND (arg0, 0);
7730	  tree t0 = type;
7731	  tree t1 = TREE_TYPE (arg00);
7732	  tree tt0 = TREE_TYPE (t0);
7733	  tree tt1 = TREE_TYPE (t1);
7734	  tree s0 = TYPE_SIZE (tt0);
7735	  tree s1 = TYPE_SIZE (tt1);
7736
7737	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7738	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7739			   TREE_OPERAND (arg0, 1));
7740	}
7741
7742      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7743	 of the same precision, and X is of an integer type not narrower than
7744	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7745      if (INTEGRAL_TYPE_P (type)
7746	  && TREE_CODE (op0) == BIT_NOT_EXPR
7747	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7748	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7749	      || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7750	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7751	{
7752	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7753	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7754	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7755	    return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7756	}
7757
7758      tem = fold_convert_const (code, type, op0);
7759      return tem ? tem : NULL_TREE;
7760
7761    case VIEW_CONVERT_EXPR:
7762      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7763	return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7764      return fold_view_convert_expr (type, op0);
7765
7766    case NEGATE_EXPR:
7767      tem = fold_negate_expr (arg0);
7768      if (tem)
7769	return fold_convert (type, tem);
7770      return NULL_TREE;
7771
7772    case ABS_EXPR:
7773      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7774	return fold_abs_const (arg0, type);
7775      else if (TREE_CODE (arg0) == NEGATE_EXPR)
7776	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7777      /* Convert fabs((double)float) into (double)fabsf(float).  */
7778      else if (TREE_CODE (arg0) == NOP_EXPR
7779	       && TREE_CODE (type) == REAL_TYPE)
7780	{
7781	  tree targ0 = strip_float_extensions (arg0);
7782	  if (targ0 != arg0)
7783	    return fold_convert (type, fold_build1 (ABS_EXPR,
7784						    TREE_TYPE (targ0),
7785						    targ0));
7786	}
7787      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
7788      else if (TREE_CODE (arg0) == ABS_EXPR)
7789	return arg0;
7790      else if (tree_expr_nonnegative_p (arg0))
7791	return arg0;
7792
7793      /* Strip sign ops from argument.  */
7794      if (TREE_CODE (type) == REAL_TYPE)
7795	{
7796	  tem = fold_strip_sign_ops (arg0);
7797	  if (tem)
7798	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7799	}
7800      return NULL_TREE;
7801
7802    case CONJ_EXPR:
7803      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7804	return fold_convert (type, arg0);
7805      if (TREE_CODE (arg0) == COMPLEX_EXPR)
7806	{
7807	  tree itype = TREE_TYPE (type);
7808	  tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7809	  tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7810	  return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7811	}
7812      if (TREE_CODE (arg0) == COMPLEX_CST)
7813	{
7814	  tree itype = TREE_TYPE (type);
7815	  tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7816	  tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7817	  return build_complex (type, rpart, negate_expr (ipart));
7818	}
7819      if (TREE_CODE (arg0) == CONJ_EXPR)
7820	return fold_convert (type, TREE_OPERAND (arg0, 0));
7821      return NULL_TREE;
7822
7823    case BIT_NOT_EXPR:
7824      if (TREE_CODE (arg0) == INTEGER_CST)
7825        return fold_not_const (arg0, type);
7826      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7827	return TREE_OPERAND (arg0, 0);
7828      /* Convert ~ (-A) to A - 1.  */
7829      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7830	return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7831			    build_int_cst (type, 1));
7832      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
7833      else if (INTEGRAL_TYPE_P (type)
7834	       && ((TREE_CODE (arg0) == MINUS_EXPR
7835		    && integer_onep (TREE_OPERAND (arg0, 1)))
7836		   || (TREE_CODE (arg0) == PLUS_EXPR
7837		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7838	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7839      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
7840      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7841	       && (tem = fold_unary (BIT_NOT_EXPR, type,
7842			       	     fold_convert (type,
7843					     	   TREE_OPERAND (arg0, 0)))))
7844	return fold_build2 (BIT_XOR_EXPR, type, tem,
7845			    fold_convert (type, TREE_OPERAND (arg0, 1)));
7846      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7847	       && (tem = fold_unary (BIT_NOT_EXPR, type,
7848			       	     fold_convert (type,
7849					     	   TREE_OPERAND (arg0, 1)))))
7850	return fold_build2 (BIT_XOR_EXPR, type,
7851			    fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7852
7853      return NULL_TREE;
7854
7855    case TRUTH_NOT_EXPR:
7856      /* The argument to invert_truthvalue must have Boolean type.  */
7857      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7858          arg0 = fold_convert (boolean_type_node, arg0);
7859
7860      /* Note that the operand of this must be an int
7861	 and its values must be 0 or 1.
7862	 ("true" is a fixed value perhaps depending on the language,
7863	 but we don't handle values other than 1 correctly yet.)  */
7864      tem = fold_truth_not_expr (arg0);
7865      if (!tem)
7866	return NULL_TREE;
7867      return fold_convert (type, tem);
7868
7869    case REALPART_EXPR:
7870      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7871	return fold_convert (type, arg0);
7872      if (TREE_CODE (arg0) == COMPLEX_EXPR)
7873	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7874				 TREE_OPERAND (arg0, 1));
7875      if (TREE_CODE (arg0) == COMPLEX_CST)
7876	return fold_convert (type, TREE_REALPART (arg0));
7877      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7878	{
7879	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7880	  tem = fold_build2 (TREE_CODE (arg0), itype,
7881			     fold_build1 (REALPART_EXPR, itype,
7882					  TREE_OPERAND (arg0, 0)),
7883			     fold_build1 (REALPART_EXPR, itype,
7884					  TREE_OPERAND (arg0, 1)));
7885	  return fold_convert (type, tem);
7886	}
7887      if (TREE_CODE (arg0) == CONJ_EXPR)
7888	{
7889	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7890	  tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7891	  return fold_convert (type, tem);
7892	}
7893      return NULL_TREE;
7894
7895    case IMAGPART_EXPR:
7896      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7897	return fold_convert (type, integer_zero_node);
7898      if (TREE_CODE (arg0) == COMPLEX_EXPR)
7899	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7900				 TREE_OPERAND (arg0, 0));
7901      if (TREE_CODE (arg0) == COMPLEX_CST)
7902	return fold_convert (type, TREE_IMAGPART (arg0));
7903      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7904	{
7905	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7906	  tem = fold_build2 (TREE_CODE (arg0), itype,
7907			     fold_build1 (IMAGPART_EXPR, itype,
7908					  TREE_OPERAND (arg0, 0)),
7909			     fold_build1 (IMAGPART_EXPR, itype,
7910					  TREE_OPERAND (arg0, 1)));
7911	  return fold_convert (type, tem);
7912	}
7913      if (TREE_CODE (arg0) == CONJ_EXPR)
7914	{
7915	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
7916	  tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7917	  return fold_convert (type, negate_expr (tem));
7918	}
7919      return NULL_TREE;
7920
7921    default:
7922      return NULL_TREE;
7923    } /* switch (code) */
7924}
7925
7926/* Fold a binary expression of code CODE and type TYPE with operands
7927   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7928   Return the folded expression if folding is successful.  Otherwise,
7929   return NULL_TREE.  */
7930
7931static tree
7932fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7933{
7934  enum tree_code compl_code;
7935
7936  if (code == MIN_EXPR)
7937    compl_code = MAX_EXPR;
7938  else if (code == MAX_EXPR)
7939    compl_code = MIN_EXPR;
7940  else
7941    gcc_unreachable ();
7942
7943  /* MIN (MAX (a, b), b) == b.  */
7944  if (TREE_CODE (op0) == compl_code
7945      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7946    return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7947
7948  /* MIN (MAX (b, a), b) == b.  */
7949  if (TREE_CODE (op0) == compl_code
7950      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7951      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7952    return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7953
7954  /* MIN (a, MAX (a, b)) == a.  */
7955  if (TREE_CODE (op1) == compl_code
7956      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7957      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7958    return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7959
7960  /* MIN (a, MAX (b, a)) == a.  */
7961  if (TREE_CODE (op1) == compl_code
7962      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7963      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7964    return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7965
7966  return NULL_TREE;
7967}
7968
7969/* Subroutine of fold_binary.  This routine performs all of the
7970   transformations that are common to the equality/inequality
7971   operators (EQ_EXPR and NE_EXPR) and the ordering operators
7972   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  External callers
7973   should use fold_binary instead.  Fold a comparison with
7974   tree code CODE and type TYPE with operands OP0 and OP1.  Return
7975   the folded comparison or NULL_TREE.  */
7976
7977static tree
7978fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7979{
7980  tree arg0, arg1, tem;
7981
7982  arg0 = op0;
7983  arg1 = op1;
7984
7985  STRIP_SIGN_NOPS (arg0);
7986  STRIP_SIGN_NOPS (arg1);
7987
7988  tem = fold_relational_const (code, type, arg0, arg1);
7989  if (tem != NULL_TREE)
7990    return tem;
7991
7992  /* If one arg is a real or integer constant, put it last.  */
7993  if (tree_swap_operands_p (arg0, arg1, true))
7994    return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7995
7996  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
7997  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7998      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7999	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8000	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8001      && (TREE_CODE (arg1) == INTEGER_CST
8002	  && !TREE_OVERFLOW (arg1)))
8003    {
8004      tree const1 = TREE_OPERAND (arg0, 1);
8005      tree const2 = arg1;
8006      tree variable = TREE_OPERAND (arg0, 0);
8007      tree lhs;
8008      int lhs_add;
8009      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8010
8011      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8012			 TREE_TYPE (arg1), const2, const1);
8013      if (TREE_CODE (lhs) == TREE_CODE (arg1)
8014	  && (TREE_CODE (lhs) != INTEGER_CST
8015	      || !TREE_OVERFLOW (lhs)))
8016	{
8017	  fold_overflow_warning (("assuming signed overflow does not occur "
8018				  "when changing X +- C1 cmp C2 to "
8019				  "X cmp C1 +- C2"),
8020				 WARN_STRICT_OVERFLOW_COMPARISON);
8021	  return fold_build2 (code, type, variable, lhs);
8022	}
8023    }
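/* Editorial note: a worked instance of the transformation above.
   Treating signed overflow as undefined,

     x + 1 < 5   ->   x < 5 - 1, i.e. x < 4

   and the fold is refused when that constant arithmetic itself
   overflows.  */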
8024
8025  /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8026     same object, then we can fold this to a comparison of the two offsets in
8027     signed size type.  This is possible because pointer arithmetic is
8028     restricted to remain within an object and overflow on pointer differences
8029     is undefined per C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8030
8031     We check flag_wrapv directly because pointer types are unsigned,
8032     and therefore TYPE_OVERFLOW_WRAPS returns true for them.  That is
8033     normally what we want, to avoid certain odd overflow cases, but
8034     not here.  */
8035  if (POINTER_TYPE_P (TREE_TYPE (arg0))
8036      && !flag_wrapv
8037      && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8038    {
8039      tree base0, offset0, base1, offset1;
8040
8041      if (extract_array_ref (arg0, &base0, &offset0)
8042	  && extract_array_ref (arg1, &base1, &offset1)
8043	  && operand_equal_p (base0, base1, 0))
8044        {
8045	  tree signed_size_type_node;
8046	  signed_size_type_node = signed_type_for (size_type_node);
8047
8048	  /* By converting to signed size type we cover middle-end pointer
8049	     arithmetic which operates on unsigned pointer types of size
8050	     type size and ARRAY_REF offsets which are properly sign or
8051	     zero extended from their type in case it is narrower than
8052	     size type.  */
8053	  if (offset0 == NULL_TREE)
8054	    offset0 = build_int_cst (signed_size_type_node, 0);
8055	  else
8056	    offset0 = fold_convert (signed_size_type_node, offset0);
8057	  if (offset1 == NULL_TREE)
8058	    offset1 = build_int_cst (signed_size_type_node, 0);
8059	  else
8060	    offset1 = fold_convert (signed_size_type_node, offset1);
8061
8062	  return fold_build2 (code, type, offset0, offset1);
8063	}
8064    }
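/* Editorial illustration (not part of GCC): comparing addresses
   within one object reduces to comparing offsets, e.g.

     &a[i] < &a[j]   ->   (ssizetype) i < (ssizetype) j

   valid because pointer arithmetic may not leave the object and
   pointer differences may not overflow ptrdiff_t.  */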
8065
8066  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8067    {
8068      tree targ0 = strip_float_extensions (arg0);
8069      tree targ1 = strip_float_extensions (arg1);
8070      tree newtype = TREE_TYPE (targ0);
8071
8072      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8073	newtype = TREE_TYPE (targ1);
8074
8075      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
8076      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8077	return fold_build2 (code, type, fold_convert (newtype, targ0),
8078			    fold_convert (newtype, targ1));
8079
8080      /* (-a) CMP (-b) -> b CMP a  */
8081      if (TREE_CODE (arg0) == NEGATE_EXPR
8082	  && TREE_CODE (arg1) == NEGATE_EXPR)
8083	return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8084			    TREE_OPERAND (arg0, 0));
8085
8086      if (TREE_CODE (arg1) == REAL_CST)
8087	{
8088	  REAL_VALUE_TYPE cst;
8089	  cst = TREE_REAL_CST (arg1);
8090
8091	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
8092	  if (TREE_CODE (arg0) == NEGATE_EXPR)
8093	    return fold_build2 (swap_tree_comparison (code), type,
8094				TREE_OPERAND (arg0, 0),
8095				build_real (TREE_TYPE (arg1),
8096					    REAL_VALUE_NEGATE (cst)));
8097
8098	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
8099	  /* a CMP (-0) -> a CMP 0  */
8100	  if (REAL_VALUE_MINUS_ZERO (cst))
8101	    return fold_build2 (code, type, arg0,
8102				build_real (TREE_TYPE (arg1), dconst0));
8103
8104	  /* x != NaN is always true, other ops are always false.  */
8105	  if (REAL_VALUE_ISNAN (cst)
8106	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8107	    {
8108	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8109	      return omit_one_operand (type, tem, arg0);
8110	    }
8111
8112	  /* Fold comparisons against infinity.  */
8113	  if (REAL_VALUE_ISINF (cst))
8114	    {
8115	      tem = fold_inf_compare (code, type, arg0, arg1);
8116	      if (tem != NULL_TREE)
8117		return tem;
8118	    }
8119	}
8120
8121      /* If this is a comparison of a real constant with a PLUS_EXPR
8122	 or a MINUS_EXPR of a real constant, and unsafe_math_optimizations
8123	 are enabled, we can convert it into a comparison with a revised
8124	 real constant as long as no overflow occurs.  */
8125      if (flag_unsafe_math_optimizations
8126	  && TREE_CODE (arg1) == REAL_CST
8127	  && (TREE_CODE (arg0) == PLUS_EXPR
8128	      || TREE_CODE (arg0) == MINUS_EXPR)
8129	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8130	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8131				      ? MINUS_EXPR : PLUS_EXPR,
8132				      arg1, TREE_OPERAND (arg0, 1), 0))
8133	  && ! TREE_CONSTANT_OVERFLOW (tem))
8134	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
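
      /* E.g. (illustrative, requires -funsafe-math-optimizations):
	 "x + 1.5 < 3.0" is rewritten as "x < 1.5" by constant-folding
	 3.0 - 1.5 above.  */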
8135
8136      /* Likewise, we can simplify a comparison of a real constant with
8137         a MINUS_EXPR whose first operand is also a real constant, i.e.
8138         (c1 - x) < c2 becomes x > c1-c2.  */
8139      if (flag_unsafe_math_optimizations
8140	  && TREE_CODE (arg1) == REAL_CST
8141	  && TREE_CODE (arg0) == MINUS_EXPR
8142	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8143	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8144				      arg1, 0))
8145	  && ! TREE_CONSTANT_OVERFLOW (tem))
8146	return fold_build2 (swap_tree_comparison (code), type,
8147			    TREE_OPERAND (arg0, 1), tem);
8148
8149      /* Fold comparisons against built-in math functions.  */
8150      if (TREE_CODE (arg1) == REAL_CST
8151	  && flag_unsafe_math_optimizations
8152	  && ! flag_errno_math)
8153	{
8154	  enum built_in_function fcode = builtin_mathfn_code (arg0);
8155
8156	  if (fcode != END_BUILTINS)
8157	    {
8158	      tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8159	      if (tem != NULL_TREE)
8160		return tem;
8161	    }
8162	}
8163    }
8164
8165  /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
8166  if (TREE_CONSTANT (arg1)
8167      && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8168	  || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8169      /* This optimization is invalid for ordered comparisons
8170         if CONST+INCR overflows or if foo+incr might overflow.
8171	 This optimization is invalid for floating point due to rounding.
8172	 For pointer types we assume overflow doesn't happen.  */
8173      && (POINTER_TYPE_P (TREE_TYPE (arg0))
8174	  || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8175	      && (code == EQ_EXPR || code == NE_EXPR))))
8176    {
8177      tree varop, newconst;
8178
8179      if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8180	{
8181	  newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8182				  arg1, TREE_OPERAND (arg0, 1));
8183	  varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8184			  TREE_OPERAND (arg0, 0),
8185			  TREE_OPERAND (arg0, 1));
8186	}
8187      else
8188	{
8189	  newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8190				  arg1, TREE_OPERAND (arg0, 1));
8191	  varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8192			  TREE_OPERAND (arg0, 0),
8193			  TREE_OPERAND (arg0, 1));
8194	}
8195
8197      /* If VAROP is a reference to a bitfield, we must mask
8198	 the constant by the width of the field.  */
8199      if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8200	  && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8201	  && host_integerp (DECL_SIZE (TREE_OPERAND
8202					 (TREE_OPERAND (varop, 0), 1)), 1))
8203	{
8204	  tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8205	  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8206	  tree folded_compare, shift;
8207
8208	  /* First check whether the comparison would come out
8209	     always the same.  If we don't do that we would
8210	     change the meaning with the masking.  */
8211	  folded_compare = fold_build2 (code, type,
8212					TREE_OPERAND (varop, 0), arg1);
8213	  if (TREE_CODE (folded_compare) == INTEGER_CST)
8214	    return omit_one_operand (type, folded_compare, varop);
8215
8216	  shift = build_int_cst (NULL_TREE,
8217				 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8218	  shift = fold_convert (TREE_TYPE (varop), shift);
8219	  newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8220				  newconst, shift);
8221	  newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8222				  newconst, shift);
8223	}
8224
8225      return fold_build2 (code, type, varop, newconst);
8226    }
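
  /* Hypothetical example of the transformation above (not from the
     original sources): "i++ == 5" becomes "++i == 6", exposing the
     incremented value to further folding.  */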
8227
8228  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8229      && (TREE_CODE (arg0) == NOP_EXPR
8230	  || TREE_CODE (arg0) == CONVERT_EXPR))
8231    {
8232      /* If we are widening one operand of an integer comparison,
8233	 see if the other operand is similarly being widened.  Perhaps we
8234	 can do the comparison in the narrower type.  */
8235      tem = fold_widened_comparison (code, type, arg0, arg1);
8236      if (tem)
8237	return tem;
8238
8239      /* Or if we are changing signedness.  */
8240      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8241      if (tem)
8242	return tem;
8243    }
8244
8245  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8246     constant, we can simplify it.  */
8247  if (TREE_CODE (arg1) == INTEGER_CST
8248      && (TREE_CODE (arg0) == MIN_EXPR
8249	  || TREE_CODE (arg0) == MAX_EXPR)
8250      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8251    {
8252      tem = optimize_minmax_comparison (code, type, op0, op1);
8253      if (tem)
8254	return tem;
8255    }
8256
8257  /* Simplify comparison of something with itself.  (For IEEE
8258     floating-point, we can only do some of these simplifications.)  */
8259  if (operand_equal_p (arg0, arg1, 0))
8260    {
8261      switch (code)
8262	{
8263	case EQ_EXPR:
8264	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8265	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8266	    return constant_boolean_node (1, type);
8267	  break;
8268
8269	case GE_EXPR:
8270	case LE_EXPR:
8271	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8272	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8273	    return constant_boolean_node (1, type);
8274	  return fold_build2 (EQ_EXPR, type, arg0, arg1);
8275
8276	case NE_EXPR:
8277	  /* For NE, we can only do this simplification if integer
8278	     or we don't honor IEEE floating point NaNs.  */
8279	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8280	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8281	    break;
8282	  /* ... fall through ...  */
8283	case GT_EXPR:
8284	case LT_EXPR:
8285	  return constant_boolean_node (0, type);
8286	default:
8287	  gcc_unreachable ();
8288	}
8289    }
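
  /* E.g. (illustrative): for integral x, "x <= x" folds to 1 above,
     whereas for IEEE floating point it folds to "x == x", which is
     false exactly when x is a NaN.  */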
8290
8291  /* If we are comparing an expression that just has comparisons
8292     of two integer values, arithmetic expressions of those comparisons,
8293     and constants, we can simplify it.  There are only three cases
8294     to check: the two values can either be equal, the first can be
8295     greater, or the second can be greater.  Fold the expression for
8296     those three values.  Since each value must be 0 or 1, we have
8297     eight possibilities, each of which corresponds to the constant 0
8298     or 1 or one of the six possible comparisons.
8299
8300     This handles common cases like (a > b) == 0 but also handles
8301     expressions like  ((x > y) - (y > x)) > 0, which supposedly
8302     occur in macroized code.  */
8303
8304  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8305    {
8306      tree cval1 = 0, cval2 = 0;
8307      int save_p = 0;
8308
8309      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8310	  /* Don't handle degenerate cases here; they should already
8311	     have been handled anyway.  */
8312	  && cval1 != 0 && cval2 != 0
8313	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8314	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8315	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8316	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8317	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8318	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8319				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8320	{
8321	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8322	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8323
8324	  /* We can't just pass T to eval_subst in case cval1 or cval2
8325	     was the same as ARG1.  */
8326
8327	  tree high_result
8328		= fold_build2 (code, type,
8329			       eval_subst (arg0, cval1, maxval,
8330					   cval2, minval),
8331			       arg1);
8332	  tree equal_result
8333		= fold_build2 (code, type,
8334			       eval_subst (arg0, cval1, maxval,
8335					   cval2, maxval),
8336			       arg1);
8337	  tree low_result
8338		= fold_build2 (code, type,
8339			       eval_subst (arg0, cval1, minval,
8340					   cval2, maxval),
8341			       arg1);
8342
8343	  /* All three of these results should be 0 or 1.  Confirm they are.
8344	     Then use those values to select the proper code to use.  */
8345
8346	  if (TREE_CODE (high_result) == INTEGER_CST
8347	      && TREE_CODE (equal_result) == INTEGER_CST
8348	      && TREE_CODE (low_result) == INTEGER_CST)
8349	    {
8350	      /* Make a 3-bit mask with the high-order bit being the
8351		 value for `>', the next for '=', and the low for '<'.  */
8352	      switch ((integer_onep (high_result) * 4)
8353		      + (integer_onep (equal_result) * 2)
8354		      + integer_onep (low_result))
8355		{
8356		case 0:
8357		  /* Always false.  */
8358		  return omit_one_operand (type, integer_zero_node, arg0);
8359		case 1:
8360		  code = LT_EXPR;
8361		  break;
8362		case 2:
8363		  code = EQ_EXPR;
8364		  break;
8365		case 3:
8366		  code = LE_EXPR;
8367		  break;
8368		case 4:
8369		  code = GT_EXPR;
8370		  break;
8371		case 5:
8372		  code = NE_EXPR;
8373		  break;
8374		case 6:
8375		  code = GE_EXPR;
8376		  break;
8377		case 7:
8378		  /* Always true.  */
8379		  return omit_one_operand (type, integer_one_node, arg0);
8380		}
8381
8382	      if (save_p)
8383		return save_expr (build2 (code, type, cval1, cval2));
8384	      return fold_build2 (code, type, cval1, cval2);
8385	    }
8386	}
8387    }
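
  /* Worked example for the mask above (illustrative): for
     "((x > y) - (y > x)) > 0" the three trial foldings give
     high_result = 1, equal_result = 0 and low_result = 0, i.e. mask 4,
     so the whole expression folds to the single comparison "x > y".  */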
8388
8389  /* Fold a comparison of the address of COMPONENT_REFs with the same
8390     type and component to a comparison of the address of the base
8391     object.  In short, &x->a OP &y->a becomes x OP y and
8392     &x->a OP &y.a becomes x OP &y.  */
8393  if (TREE_CODE (arg0) == ADDR_EXPR
8394      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8395      && TREE_CODE (arg1) == ADDR_EXPR
8396      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8397    {
8398      tree cref0 = TREE_OPERAND (arg0, 0);
8399      tree cref1 = TREE_OPERAND (arg1, 0);
8400      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8401	{
8402	  tree op0 = TREE_OPERAND (cref0, 0);
8403	  tree op1 = TREE_OPERAND (cref1, 0);
8404	  return fold_build2 (code, type,
8405			      build_fold_addr_expr (op0),
8406			      build_fold_addr_expr (op1));
8407	}
8408    }
8409
8410  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8411     into a single range test.  */
8412  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8413       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8414      && TREE_CODE (arg1) == INTEGER_CST
8415      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8416      && !integer_zerop (TREE_OPERAND (arg0, 1))
8417      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8418      && !TREE_OVERFLOW (arg1))
8419    {
8420      tem = fold_div_compare (code, type, arg0, arg1);
8421      if (tem != NULL_TREE)
8422	return tem;
8423    }
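
  /* Illustrative example (not from the original sources): for unsigned
     x, "x / 4 == 2" holds exactly for x in [8, 11], so fold_div_compare
     can rewrite it as a single range test such as "x - 8 <= 3".  */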
8424
8425  return NULL_TREE;
8426}
8427
8428
8429/* Subroutine of fold_binary.  Optimize complex multiplications of the
8430   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
8431   argument EXPR represents the expression "z" of type TYPE.  */
8432
8433static tree
8434fold_mult_zconjz (tree type, tree expr)
8435{
8436  tree itype = TREE_TYPE (type);
8437  tree rpart, ipart, tem;
8438
8439  if (TREE_CODE (expr) == COMPLEX_EXPR)
8440    {
8441      rpart = TREE_OPERAND (expr, 0);
8442      ipart = TREE_OPERAND (expr, 1);
8443    }
8444  else if (TREE_CODE (expr) == COMPLEX_CST)
8445    {
8446      rpart = TREE_REALPART (expr);
8447      ipart = TREE_IMAGPART (expr);
8448    }
8449  else
8450    {
8451      expr = save_expr (expr);
8452      rpart = fold_build1 (REALPART_EXPR, itype, expr);
8453      ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8454    }
8455
8456  rpart = save_expr (rpart);
8457  ipart = save_expr (ipart);
8458  tem = fold_build2 (PLUS_EXPR, itype,
8459		     fold_build2 (MULT_EXPR, itype, rpart, rpart),
8460		     fold_build2 (MULT_EXPR, itype, ipart, ipart));
8461  return fold_build2 (COMPLEX_EXPR, type, tem,
8462		      fold_convert (itype, integer_zero_node));
8463}
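
/* For reference, the derivation behind fold_mult_zconjz (illustrative):
   with z = a + b*i,

       z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i

   which is exactly the COMPLEX_EXPR built above.  */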
8464
8465
8466/* Fold a binary expression of code CODE and type TYPE with operands
8467   OP0 and OP1.  Return the folded expression if folding is
8468   successful.  Otherwise, return NULL_TREE.  */
8469
8470tree
8471fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8472{
8473  enum tree_code_class kind = TREE_CODE_CLASS (code);
8474  tree arg0, arg1, tem;
8475  tree t1 = NULL_TREE;
8476  bool strict_overflow_p;
8477
8478  gcc_assert (IS_EXPR_CODE_CLASS (kind)
8479	      && TREE_CODE_LENGTH (code) == 2
8480	      && op0 != NULL_TREE
8481	      && op1 != NULL_TREE);
8482
8483  arg0 = op0;
8484  arg1 = op1;
8485
8486  /* Strip any conversions that don't change the mode.  This is
8487     safe for every expression, except for a comparison expression
8488     because its signedness is derived from its operands.  So, in
8489     the latter case, only strip conversions that don't change the
8490     signedness.
8491
8492     Note that this is done as an internal manipulation within the
8493     constant folder, in order to find the simplest representation
8494     of the arguments so that their form can be studied.  In any
8495     case, the appropriate type conversions should be put back in
8496     the tree that will get out of the constant folder.  */
8497
8498  if (kind == tcc_comparison)
8499    {
8500      STRIP_SIGN_NOPS (arg0);
8501      STRIP_SIGN_NOPS (arg1);
8502    }
8503  else
8504    {
8505      STRIP_NOPS (arg0);
8506      STRIP_NOPS (arg1);
8507    }
8508
8509  /* Note that TREE_CONSTANT isn't enough: static var addresses are
8510     constant but we can't do arithmetic on them.  */
8511  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8512      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8513      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8514      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8515    {
8516      if (kind == tcc_binary)
8517	tem = const_binop (code, arg0, arg1, 0);
8518      else if (kind == tcc_comparison)
8519	tem = fold_relational_const (code, type, arg0, arg1);
8520      else
8521	tem = NULL_TREE;
8522
8523      if (tem != NULL_TREE)
8524	{
8525	  if (TREE_TYPE (tem) != type)
8526	    tem = fold_convert (type, tem);
8527	  return tem;
8528	}
8529    }
8530
8531  /* If this is a commutative operation, and ARG0 is a constant, move it
8532     to ARG1 to reduce the number of tests below.  */
8533  if (commutative_tree_code (code)
8534      && tree_swap_operands_p (arg0, arg1, true))
8535    return fold_build2 (code, type, op1, op0);
8536
8537  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8538
8539     First check for cases where an arithmetic operation is applied to a
8540     compound, conditional, or comparison operation.  Push the arithmetic
8541     operation inside the compound or conditional to see if any folding
8542     can then be done.  Convert comparison to conditional for this purpose.
8543     This also optimizes non-constant cases that used to be done in
8544     expand_expr.
8545
8546     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
8547     where one of the operands is a truth value and the other is a truth
8548     value or a BIT_AND_EXPR with the constant 1.  In that case, the
8549     code below would make the expression more complex.  Change it to a
8550     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
8551     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
8552
8553  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8554       || code == EQ_EXPR || code == NE_EXPR)
8555      && ((truth_value_p (TREE_CODE (arg0))
8556	   && (truth_value_p (TREE_CODE (arg1))
8557	       || (TREE_CODE (arg1) == BIT_AND_EXPR
8558		   && integer_onep (TREE_OPERAND (arg1, 1)))))
8559	  || (truth_value_p (TREE_CODE (arg1))
8560	      && (truth_value_p (TREE_CODE (arg0))
8561		  || (TREE_CODE (arg0) == BIT_AND_EXPR
8562		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
8563    {
8564      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8565			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8566			 : TRUTH_XOR_EXPR,
8567			 boolean_type_node,
8568			 fold_convert (boolean_type_node, arg0),
8569			 fold_convert (boolean_type_node, arg1));
8570
8571      if (code == EQ_EXPR)
8572	tem = invert_truthvalue (tem);
8573
8574      return fold_convert (type, tem);
8575    }
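
  /* E.g. (illustrative): "(a < b) & (c < d)" becomes a TRUTH_AND_EXPR
     of the two comparisons, and "(a < b) == (c < d)" becomes the
     inversion of a TRUTH_XOR_EXPR of them.  */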
8576
8577  if (TREE_CODE_CLASS (code) == tcc_binary
8578      || TREE_CODE_CLASS (code) == tcc_comparison)
8579    {
8580      if (TREE_CODE (arg0) == COMPOUND_EXPR)
8581	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8582		       fold_build2 (code, type,
8583				    TREE_OPERAND (arg0, 1), op1));
8584      if (TREE_CODE (arg1) == COMPOUND_EXPR
8585	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8586	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8587		       fold_build2 (code, type,
8588				    op0, TREE_OPERAND (arg1, 1)));
8589
8590      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8591	{
8592	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8593						     arg0, arg1,
8594						     /*cond_first_p=*/1);
8595	  if (tem != NULL_TREE)
8596	    return tem;
8597	}
8598
8599      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8600	{
8601	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8602						     arg1, arg0,
8603					             /*cond_first_p=*/0);
8604	  if (tem != NULL_TREE)
8605	    return tem;
8606	}
8607    }
8608
8609  switch (code)
8610    {
8611    case PLUS_EXPR:
8612      /* A + (-B) -> A - B */
8613      if (TREE_CODE (arg1) == NEGATE_EXPR)
8614	return fold_build2 (MINUS_EXPR, type,
8615			    fold_convert (type, arg0),
8616			    fold_convert (type, TREE_OPERAND (arg1, 0)));
8617      /* (-A) + B -> B - A */
8618      if (TREE_CODE (arg0) == NEGATE_EXPR
8619	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8620	return fold_build2 (MINUS_EXPR, type,
8621			    fold_convert (type, arg1),
8622			    fold_convert (type, TREE_OPERAND (arg0, 0)));
8623      /* Convert ~A + 1 to -A.  */
8624      if (INTEGRAL_TYPE_P (type)
8625	  && TREE_CODE (arg0) == BIT_NOT_EXPR
8626	  && integer_onep (arg1))
8627	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8628
8629      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being
8630	 the same, or one of them being the constant 1.  */
8631      if ((TREE_CODE (arg0) == MULT_EXPR
8632	   || TREE_CODE (arg1) == MULT_EXPR)
8633	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8634        {
8635	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8636	  if (tem)
8637	    return tem;
8638	}
8639
8640      if (! FLOAT_TYPE_P (type))
8641	{
8642	  if (integer_zerop (arg1))
8643	    return non_lvalue (fold_convert (type, arg0));
8644
8645	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8646	     with a constant, and the two constants have no bits in common,
8647	     we should treat this as a BIT_IOR_EXPR since this may produce more
8648	     simplifications.  */
8649	  if (TREE_CODE (arg0) == BIT_AND_EXPR
8650	      && TREE_CODE (arg1) == BIT_AND_EXPR
8651	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8652	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8653	      && integer_zerop (const_binop (BIT_AND_EXPR,
8654					     TREE_OPERAND (arg0, 1),
8655					     TREE_OPERAND (arg1, 1), 0)))
8656	    {
8657	      code = BIT_IOR_EXPR;
8658	      goto bit_ior;
8659	    }
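
	  /* E.g. (illustrative): "(x & 0x0f) + (y & 0xf0)" cannot carry
	     between the two masked parts, so it is rewritten above as
	     "(x & 0x0f) | (y & 0xf0)".  */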
8660
8661	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8662	     (plus (plus (mult) (mult)) (foo)) so that we can
8663	     take advantage of the factoring cases below.  */
8664	  if (((TREE_CODE (arg0) == PLUS_EXPR
8665		|| TREE_CODE (arg0) == MINUS_EXPR)
8666	       && TREE_CODE (arg1) == MULT_EXPR)
8667	      || ((TREE_CODE (arg1) == PLUS_EXPR
8668		   || TREE_CODE (arg1) == MINUS_EXPR)
8669		  && TREE_CODE (arg0) == MULT_EXPR))
8670	    {
8671	      tree parg0, parg1, parg, marg;
8672	      enum tree_code pcode;
8673
8674	      if (TREE_CODE (arg1) == MULT_EXPR)
8675		parg = arg0, marg = arg1;
8676	      else
8677		parg = arg1, marg = arg0;
8678	      pcode = TREE_CODE (parg);
8679	      parg0 = TREE_OPERAND (parg, 0);
8680	      parg1 = TREE_OPERAND (parg, 1);
8681	      STRIP_NOPS (parg0);
8682	      STRIP_NOPS (parg1);
8683
8684	      if (TREE_CODE (parg0) == MULT_EXPR
8685		  && TREE_CODE (parg1) != MULT_EXPR)
8686		return fold_build2 (pcode, type,
8687				    fold_build2 (PLUS_EXPR, type,
8688						 fold_convert (type, parg0),
8689						 fold_convert (type, marg)),
8690				    fold_convert (type, parg1));
8691	      if (TREE_CODE (parg0) != MULT_EXPR
8692		  && TREE_CODE (parg1) == MULT_EXPR)
8693		return fold_build2 (PLUS_EXPR, type,
8694				    fold_convert (type, parg0),
8695				    fold_build2 (pcode, type,
8696						 fold_convert (type, marg),
8697						 fold_convert (type,
8698							       parg1)));
8699	    }
8700
8701	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8702	     of the array.  The loop optimizer sometimes produces this type of
8703	     expression.  */
8704	  if (TREE_CODE (arg0) == ADDR_EXPR)
8705	    {
8706	      tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8707	      if (tem)
8708		return fold_convert (type, tem);
8709	    }
8710	  else if (TREE_CODE (arg1) == ADDR_EXPR)
8711	    {
8712	      tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8713	      if (tem)
8714		return fold_convert (type, tem);
8715	    }
8716	}
8717      else
8718	{
8719	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
8720	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8721	    return non_lvalue (fold_convert (type, arg0));
8722
8723	  /* Likewise if the operands are reversed.  */
8724	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8725	    return non_lvalue (fold_convert (type, arg1));
8726
8727	  /* Convert X + -C into X - C.  */
8728	  if (TREE_CODE (arg1) == REAL_CST
8729	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8730	    {
8731	      tem = fold_negate_const (arg1, type);
8732	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8733		return fold_build2 (MINUS_EXPR, type,
8734				    fold_convert (type, arg0),
8735				    fold_convert (type, tem));
8736	    }
8737
8738          if (flag_unsafe_math_optimizations
8739	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8740	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8741	      && (tem = distribute_real_division (code, type, arg0, arg1)))
8742	    return tem;
8743
8744	  /* Convert x+x into x*2.0.  */
8745	  if (operand_equal_p (arg0, arg1, 0)
8746	      && SCALAR_FLOAT_TYPE_P (type))
8747	    return fold_build2 (MULT_EXPR, type, arg0,
8748				build_real (type, dconst2));
8749
8750          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
8751          if (flag_unsafe_math_optimizations
8752              && TREE_CODE (arg1) == PLUS_EXPR
8753              && TREE_CODE (arg0) != MULT_EXPR)
8754            {
8755              tree tree10 = TREE_OPERAND (arg1, 0);
8756              tree tree11 = TREE_OPERAND (arg1, 1);
8757              if (TREE_CODE (tree11) == MULT_EXPR
8758		  && TREE_CODE (tree10) == MULT_EXPR)
8759                {
8760                  tree tree0;
8761                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8762                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8763                }
8764            }
8765          /* Convert (b*c + d*e) + a into b*c + (d*e + a).  */
8766          if (flag_unsafe_math_optimizations
8767              && TREE_CODE (arg0) == PLUS_EXPR
8768              && TREE_CODE (arg1) != MULT_EXPR)
8769            {
8770              tree tree00 = TREE_OPERAND (arg0, 0);
8771              tree tree01 = TREE_OPERAND (arg0, 1);
8772              if (TREE_CODE (tree01) == MULT_EXPR
8773		  && TREE_CODE (tree00) == MULT_EXPR)
8774                {
8775                  tree tree0;
8776                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8777                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8778                }
8779            }
8780	}
8781
8782     bit_rotate:
8783      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8784	 is a rotate of A by C1 bits.  */
8785      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8786	 is a rotate of A by B bits.  */
8787      {
8788	enum tree_code code0, code1;
8789	code0 = TREE_CODE (arg0);
8790	code1 = TREE_CODE (arg1);
8791	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8792	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8793	    && operand_equal_p (TREE_OPERAND (arg0, 0),
8794			        TREE_OPERAND (arg1, 0), 0)
8795	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8796	  {
8797	    tree tree01, tree11;
8798	    enum tree_code code01, code11;
8799
8800	    tree01 = TREE_OPERAND (arg0, 1);
8801	    tree11 = TREE_OPERAND (arg1, 1);
8802	    STRIP_NOPS (tree01);
8803	    STRIP_NOPS (tree11);
8804	    code01 = TREE_CODE (tree01);
8805	    code11 = TREE_CODE (tree11);
8806	    if (code01 == INTEGER_CST
8807		&& code11 == INTEGER_CST
8808		&& TREE_INT_CST_HIGH (tree01) == 0
8809		&& TREE_INT_CST_HIGH (tree11) == 0
8810		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8811		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8812	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8813			     code0 == LSHIFT_EXPR ? tree01 : tree11);
8814	    else if (code11 == MINUS_EXPR)
8815	      {
8816		tree tree110, tree111;
8817		tree110 = TREE_OPERAND (tree11, 0);
8818		tree111 = TREE_OPERAND (tree11, 1);
8819		STRIP_NOPS (tree110);
8820		STRIP_NOPS (tree111);
8821		if (TREE_CODE (tree110) == INTEGER_CST
8822		    && 0 == compare_tree_int (tree110,
8823					      TYPE_PRECISION
8824					      (TREE_TYPE (TREE_OPERAND
8825							  (arg0, 0))))
8826		    && operand_equal_p (tree01, tree111, 0))
8827		  return build2 ((code0 == LSHIFT_EXPR
8828				  ? LROTATE_EXPR
8829				  : RROTATE_EXPR),
8830				 type, TREE_OPERAND (arg0, 0), tree01);
8831	      }
8832	    else if (code01 == MINUS_EXPR)
8833	      {
8834		tree tree010, tree011;
8835		tree010 = TREE_OPERAND (tree01, 0);
8836		tree011 = TREE_OPERAND (tree01, 1);
8837		STRIP_NOPS (tree010);
8838		STRIP_NOPS (tree011);
8839		if (TREE_CODE (tree010) == INTEGER_CST
8840		    && 0 == compare_tree_int (tree010,
8841					      TYPE_PRECISION
8842					      (TREE_TYPE (TREE_OPERAND
8843							  (arg0, 0))))
8844		    && operand_equal_p (tree11, tree011, 0))
8845		  return build2 ((code0 != LSHIFT_EXPR
8846				  ? LROTATE_EXPR
8847				  : RROTATE_EXPR),
8848				 type, TREE_OPERAND (arg0, 0), tree11);
8849	      }
8850	  }
8851      }
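
      /* Minimal illustration (assuming a 32-bit unsigned int a):

	     (a << 3) + (a >> 29)          is a rotate left by 3
	     (a << b) + (a >> (32 - b))    is a rotate left by b

	 The same matching is reached from BIT_IOR_EXPR and BIT_XOR_EXPR
	 via the bit_rotate label.  */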
8852
8853    associate:
8854      /* In most languages, we can't associate operations on floats through
8855	 parentheses.  Rather than remember where the parentheses were, we
8856	 don't associate floats at all, unless the user has specified
8857	 -funsafe-math-optimizations.  */
8858
8859      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8860	{
8861	  tree var0, con0, lit0, minus_lit0;
8862	  tree var1, con1, lit1, minus_lit1;
8863	  bool ok = true;
8864
8865	  /* Split both trees into variables, constants, and literals.  Then
8866	     associate each group together, the constants with literals,
8867	     then the result with variables.  This increases the chances of
8868	     literals being recombined later and of generating relocatable
8869	     expressions for the sum of a constant and literal.  */
8870	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8871	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8872			     code == MINUS_EXPR);
8873
8874	  /* With undefined overflow we can only associate constants
8875	     with one variable.  */
8876	  if ((POINTER_TYPE_P (type)
8877	       || (INTEGRAL_TYPE_P (type)
8878		   && !(TYPE_UNSIGNED (type) || flag_wrapv)))
8879	      && var0 && var1)
8880	    {
8881	      tree tmp0 = var0;
8882	      tree tmp1 = var1;
8883
8884	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
8885	        tmp0 = TREE_OPERAND (tmp0, 0);
8886	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
8887	        tmp1 = TREE_OPERAND (tmp1, 0);
8888	      /* The only case we can still associate with two variables
8889		 is if they are the same, modulo negation.  */
8890	      if (!operand_equal_p (tmp0, tmp1, 0))
8891	        ok = false;
8892	    }
8893
8894	  /* Only do something if we found more than two objects.  Otherwise,
8895	     nothing has changed and we risk infinite recursion.  */
8896	  if (ok
8897	      && (2 < ((var0 != 0) + (var1 != 0)
8898		       + (con0 != 0) + (con1 != 0)
8899		       + (lit0 != 0) + (lit1 != 0)
8900		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
8901	    {
8902	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
8903	      if (code == MINUS_EXPR)
8904		code = PLUS_EXPR;
8905
8906	      var0 = associate_trees (var0, var1, code, type);
8907	      con0 = associate_trees (con0, con1, code, type);
8908	      lit0 = associate_trees (lit0, lit1, code, type);
8909	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8910
8911	      /* Preserve the MINUS_EXPR if the negative part of the literal is
8912		 greater than the positive part.  Otherwise, the multiplicative
8913		 folding code (i.e. extract_muldiv) may be fooled in case
8914		 unsigned constants are subtracted, like in the following
8915		 example: ((X*2 + 4) - 8U)/2.  */
8916	      if (minus_lit0 && lit0)
8917		{
8918		  if (TREE_CODE (lit0) == INTEGER_CST
8919		      && TREE_CODE (minus_lit0) == INTEGER_CST
8920		      && tree_int_cst_lt (lit0, minus_lit0))
8921		    {
8922		      minus_lit0 = associate_trees (minus_lit0, lit0,
8923						    MINUS_EXPR, type);
8924		      lit0 = 0;
8925		    }
8926		  else
8927		    {
8928		      lit0 = associate_trees (lit0, minus_lit0,
8929					      MINUS_EXPR, type);
8930		      minus_lit0 = 0;
8931		    }
8932		}
8933	      if (minus_lit0)
8934		{
8935		  if (con0 == 0)
8936		    return fold_convert (type,
8937					 associate_trees (var0, minus_lit0,
8938							  MINUS_EXPR, type));
8939		  else
8940		    {
8941		      con0 = associate_trees (con0, minus_lit0,
8942					      MINUS_EXPR, type);
8943		      return fold_convert (type,
8944					   associate_trees (var0, con0,
8945							    PLUS_EXPR, type));
8946		    }
8947		}
8948
8949	      con0 = associate_trees (con0, lit0, code, type);
8950	      return fold_convert (type, associate_trees (var0, con0,
8951							  code, type));
8952	    }
8953	}
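
      /* Illustrative example of the association above (not from the
	 original sources): for unsigned x and y, "(x + 3) + (y + 5)"
	 splits into variables {x, y} and literals {3, 5}, which
	 recombine as "(x + y) + 8".  */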
8954
8955      return NULL_TREE;
8956
8957    case MINUS_EXPR:
8958      /* A - (-B) -> A + B */
8959      if (TREE_CODE (arg1) == NEGATE_EXPR)
8960	return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8961      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
8962      if (TREE_CODE (arg0) == NEGATE_EXPR
8963	  && (FLOAT_TYPE_P (type)
8964	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8965	  && negate_expr_p (arg1)
8966	  && reorder_operands_p (arg0, arg1))
8967	return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8968			    TREE_OPERAND (arg0, 0));
8969      /* Convert -A - 1 to ~A.  */
8970      if (INTEGRAL_TYPE_P (type)
8971	  && TREE_CODE (arg0) == NEGATE_EXPR
8972	  && integer_onep (arg1))
8973	return fold_build1 (BIT_NOT_EXPR, type,
8974			    fold_convert (type, TREE_OPERAND (arg0, 0)));
8975
8976      /* Convert -1 - A to ~A.  */
8977      if (INTEGRAL_TYPE_P (type)
8978	  && integer_all_onesp (arg0))
8979	return fold_build1 (BIT_NOT_EXPR, type, arg1);
8980
8981      if (! FLOAT_TYPE_P (type))
8982	{
8983	  if (integer_zerop (arg0))
8984	    return negate_expr (fold_convert (type, arg1));
8985	  if (integer_zerop (arg1))
8986	    return non_lvalue (fold_convert (type, arg0));
8987
8988	  /* Fold A - (A & B) into ~B & A.  */
8989	  if (!TREE_SIDE_EFFECTS (arg0)
8990	      && TREE_CODE (arg1) == BIT_AND_EXPR)
8991	    {
8992	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8993		return fold_build2 (BIT_AND_EXPR, type,
8994				    fold_build1 (BIT_NOT_EXPR, type,
8995						 TREE_OPERAND (arg1, 0)),
8996				    arg0);
8997	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8998		return fold_build2 (BIT_AND_EXPR, type,
8999				    fold_build1 (BIT_NOT_EXPR, type,
9000						 TREE_OPERAND (arg1, 1)),
9001				    arg0);
9002	    }
9003
9004	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9005	     any power of 2 minus 1.  */
9006	  if (TREE_CODE (arg0) == BIT_AND_EXPR
9007	      && TREE_CODE (arg1) == BIT_AND_EXPR
9008	      && operand_equal_p (TREE_OPERAND (arg0, 0),
9009				  TREE_OPERAND (arg1, 0), 0))
9010	    {
9011	      tree mask0 = TREE_OPERAND (arg0, 1);
9012	      tree mask1 = TREE_OPERAND (arg1, 1);
9013	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9014
9015	      if (operand_equal_p (tem, mask1, 0))
9016		{
9017		  tem = fold_build2 (BIT_XOR_EXPR, type,
9018				     TREE_OPERAND (arg0, 0), mask1);
9019		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
9020		}
9021	    }
9022	}
9023
9024      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
9025      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9026	return non_lvalue (fold_convert (type, arg0));
9027
9028      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
9029	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9030	 (-ARG1 + ARG0) reduces to -ARG1.  */
9031      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9032	return negate_expr (fold_convert (type, arg1));
9033
9034      /* Fold &x - &x.  This can happen from &x.foo - &x.
9035	 This is unsafe for certain floats even in non-IEEE formats.
9036	 In IEEE, it is unsafe because it does wrong for NaNs.
9037	 Also note that operand_equal_p is always false if an operand
9038	 is volatile.  */
9039
9040      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9041	  && operand_equal_p (arg0, arg1, 0))
9042	return fold_convert (type, integer_zero_node);
9043
9044      /* A - B -> A + (-B) if B is easily negatable.  */
9045      if (negate_expr_p (arg1)
9046	  && ((FLOAT_TYPE_P (type)
9047               /* Avoid this transformation if B is a positive REAL_CST.  */
9048	       && (TREE_CODE (arg1) != REAL_CST
9049		   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9050	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
9051	return fold_build2 (PLUS_EXPR, type,
9052			    fold_convert (type, arg0),
9053			    fold_convert (type, negate_expr (arg1)));
9054
9055      /* Try folding difference of addresses.  */
9056      {
9057	HOST_WIDE_INT diff;
9058
9059	if ((TREE_CODE (arg0) == ADDR_EXPR
9060	     || TREE_CODE (arg1) == ADDR_EXPR)
9061	    && ptr_difference_const (arg0, arg1, &diff))
9062	  return build_int_cst_type (type, diff);
9063      }
9064
9065      /* Fold &a[i] - &a[j] to (i-j) * sizeof (a[0]).  */
9066      if (TREE_CODE (arg0) == ADDR_EXPR
9067	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9068	  && TREE_CODE (arg1) == ADDR_EXPR
9069	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9070        {
9071	  tree aref0 = TREE_OPERAND (arg0, 0);
9072	  tree aref1 = TREE_OPERAND (arg1, 0);
9073	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
9074			       TREE_OPERAND (aref1, 0), 0))
9075	    {
9076	      tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9077	      tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9078	      tree esz = array_ref_element_size (aref0);
9079	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
9080	      return fold_build2 (MULT_EXPR, type, diff,
9081			          fold_convert (type, esz));
9082
9083	    }
9084	}
9085
9086      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
9087	 of the array.  The loop optimizer sometimes produces this type of
9088	 expression.  */
9089      if (TREE_CODE (arg0) == ADDR_EXPR)
9090	{
9091	  tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9092	  if (tem)
9093	    return fold_convert (type, tem);
9094	}
9095
9096      if (flag_unsafe_math_optimizations
9097	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9098	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9099	  && (tem = distribute_real_division (code, type, arg0, arg1)))
9100	return tem;
9101
9102      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being
9103	 the same, or one of them being the constant 1.  */
9104      if ((TREE_CODE (arg0) == MULT_EXPR
9105	   || TREE_CODE (arg1) == MULT_EXPR)
9106	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9107        {
9108	  tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9109	  if (tem)
9110	    return tem;
9111	}
9112
9113      goto associate;
9114
9115    case MULT_EXPR:
9116      /* (-A) * (-B) -> A * B  */
9117      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9118	return fold_build2 (MULT_EXPR, type,
9119			    fold_convert (type, TREE_OPERAND (arg0, 0)),
9120			    fold_convert (type, negate_expr (arg1)));
9121      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9122	return fold_build2 (MULT_EXPR, type,
9123			    fold_convert (type, negate_expr (arg0)),
9124			    fold_convert (type, TREE_OPERAND (arg1, 0)));
9125
9126      if (! FLOAT_TYPE_P (type))
9127	{
9128	  if (integer_zerop (arg1))
9129	    return omit_one_operand (type, arg1, arg0);
9130	  if (integer_onep (arg1))
9131	    return non_lvalue (fold_convert (type, arg0));
9132	  /* Transform x * -1 into -x.  */
9133	  if (integer_all_onesp (arg1))
9134	    return fold_convert (type, negate_expr (arg0));
9135
9136	  /* (a * (1 << b)) is (a << b)  */
9137	  if (TREE_CODE (arg1) == LSHIFT_EXPR
9138	      && integer_onep (TREE_OPERAND (arg1, 0)))
9139	    return fold_build2 (LSHIFT_EXPR, type, arg0,
9140				TREE_OPERAND (arg1, 1));
9141	  if (TREE_CODE (arg0) == LSHIFT_EXPR
9142	      && integer_onep (TREE_OPERAND (arg0, 0)))
9143	    return fold_build2 (LSHIFT_EXPR, type, arg1,
9144				TREE_OPERAND (arg0, 1));
9145
9146	  strict_overflow_p = false;
9147	  if (TREE_CODE (arg1) == INTEGER_CST
9148	      && 0 != (tem = extract_muldiv (op0,
9149					     fold_convert (type, arg1),
9150					     code, NULL_TREE,
9151					     &strict_overflow_p)))
9152	    {
9153	      if (strict_overflow_p)
9154		fold_overflow_warning (("assuming signed overflow does not "
9155					"occur when simplifying "
9156					"multiplication"),
9157				       WARN_STRICT_OVERFLOW_MISC);
9158	      return fold_convert (type, tem);
9159	    }
9160
9161	  /* Optimize z * conj(z) for integer complex numbers.  */
9162	  if (TREE_CODE (arg0) == CONJ_EXPR
9163	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9164	    return fold_mult_zconjz (type, arg1);
9165	  if (TREE_CODE (arg1) == CONJ_EXPR
9166	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9167	    return fold_mult_zconjz (type, arg0);
9168	}
9169      else
9170	{
9171	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
9172	     when x is NaN, since x * 0 is also NaN.  Nor are they the
9173	     same in modes with signed zeros, since multiplying a
9174	     negative value by 0 gives -0, not +0.  */
9175	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9176	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9177	      && real_zerop (arg1))
9178	    return omit_one_operand (type, arg1, arg0);
9179	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
9180	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9181	      && real_onep (arg1))
9182	    return non_lvalue (fold_convert (type, arg0));
9183
9184	  /* Transform x * -1.0 into -x.  */
9185	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9186	      && real_minus_onep (arg1))
9187	    return fold_convert (type, negate_expr (arg0));
9188
9189	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
9190	  if (flag_unsafe_math_optimizations
9191	      && TREE_CODE (arg0) == RDIV_EXPR
9192	      && TREE_CODE (arg1) == REAL_CST
9193	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9194	    {
9195	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9196				      arg1, 0);
9197	      if (tem)
9198		return fold_build2 (RDIV_EXPR, type, tem,
9199				    TREE_OPERAND (arg0, 1));
9200	    }
9201
9202          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
9203	  if (operand_equal_p (arg0, arg1, 0))
9204	    {
9205	      tree tem = fold_strip_sign_ops (arg0);
9206	      if (tem != NULL_TREE)
9207		{
9208		  tem = fold_convert (type, tem);
9209		  return fold_build2 (MULT_EXPR, type, tem, tem);
9210		}
9211	    }
9212
9213	  /* Optimize z * conj(z) for floating point complex numbers.
9214	     Guarded by flag_unsafe_math_optimizations as non-finite
9215	     imaginary components don't produce scalar results.  */
9216	  if (flag_unsafe_math_optimizations
9217	      && TREE_CODE (arg0) == CONJ_EXPR
9218	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9219	    return fold_mult_zconjz (type, arg1);
9220	  if (flag_unsafe_math_optimizations
9221	      && TREE_CODE (arg1) == CONJ_EXPR
9222	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9223	    return fold_mult_zconjz (type, arg0);
9224
9225	  if (flag_unsafe_math_optimizations)
9226	    {
9227	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9228	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9229
9230	      /* Optimizations of root(...)*root(...).  */
9231	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9232		{
9233		  tree rootfn, arg, arglist;
9234		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9235		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9236
9237		  /* Optimize sqrt(x)*sqrt(x) as x.  */
9238		  if (BUILTIN_SQRT_P (fcode0)
9239		      && operand_equal_p (arg00, arg10, 0)
9240		      && ! HONOR_SNANS (TYPE_MODE (type)))
9241		    return arg00;
9242
9243	          /* Optimize root(x)*root(y) as root(x*y).  */
9244		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9245		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9246		  arglist = build_tree_list (NULL_TREE, arg);
9247		  return build_function_call_expr (rootfn, arglist);
9248		}
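
	      /* E.g. (illustrative, and only valid under
		 -funsafe-math-optimizations since results differ for
		 negative operands): sqrt(x)*sqrt(y) becomes sqrt(x*y).  */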
9249
9250	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
9251	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9252		{
9253		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9254		  tree arg = fold_build2 (PLUS_EXPR, type,
9255					  TREE_VALUE (TREE_OPERAND (arg0, 1)),
9256					  TREE_VALUE (TREE_OPERAND (arg1, 1)));
9257		  tree arglist = build_tree_list (NULL_TREE, arg);
9258		  return build_function_call_expr (expfn, arglist);
9259		}
9260
9261	      /* Optimizations of pow(...)*pow(...).  */
9262	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9263		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9264		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9265		{
9266		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9267		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9268								     1)));
9269		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9270		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9271								     1)));
9272
9273		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
9274		  if (operand_equal_p (arg01, arg11, 0))
9275		    {
9276		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9277		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9278		      tree arglist = tree_cons (NULL_TREE, arg,
9279						build_tree_list (NULL_TREE,
9280								 arg01));
9281		      return build_function_call_expr (powfn, arglist);
9282		    }
9283
9284		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
9285		  if (operand_equal_p (arg00, arg10, 0))
9286		    {
9287		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9288		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9289		      tree arglist = tree_cons (NULL_TREE, arg00,
9290						build_tree_list (NULL_TREE,
9291								 arg));
9292		      return build_function_call_expr (powfn, arglist);
9293		    }
9294		}
9295
9296	      /* Optimize tan(x)*cos(x) as sin(x).  */
9297	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9298		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9299		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9300		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9301		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9302		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9303		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9304				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9305		{
9306		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9307
9308		  if (sinfn != NULL_TREE)
9309		    return build_function_call_expr (sinfn,
9310						     TREE_OPERAND (arg0, 1));
9311		}
9312
9313	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
9314	      if (fcode1 == BUILT_IN_POW
9315		  || fcode1 == BUILT_IN_POWF
9316		  || fcode1 == BUILT_IN_POWL)
9317		{
9318		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9319		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9320								     1)));
9321		  if (TREE_CODE (arg11) == REAL_CST
9322		      && ! TREE_CONSTANT_OVERFLOW (arg11)
9323		      && operand_equal_p (arg0, arg10, 0))
9324		    {
9325		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9326		      REAL_VALUE_TYPE c;
9327		      tree arg, arglist;
9328
9329		      c = TREE_REAL_CST (arg11);
9330		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9331		      arg = build_real (type, c);
9332		      arglist = build_tree_list (NULL_TREE, arg);
9333		      arglist = tree_cons (NULL_TREE, arg0, arglist);
9334		      return build_function_call_expr (powfn, arglist);
9335		    }
9336		}
9337
9338	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
9339	      if (fcode0 == BUILT_IN_POW
9340		  || fcode0 == BUILT_IN_POWF
9341		  || fcode0 == BUILT_IN_POWL)
9342		{
9343		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9344		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9345								     1)));
9346		  if (TREE_CODE (arg01) == REAL_CST
9347		      && ! TREE_CONSTANT_OVERFLOW (arg01)
9348		      && operand_equal_p (arg1, arg00, 0))
9349		    {
9350		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9351		      REAL_VALUE_TYPE c;
9352		      tree arg, arglist;
9353
9354		      c = TREE_REAL_CST (arg01);
9355		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9356		      arg = build_real (type, c);
9357		      arglist = build_tree_list (NULL_TREE, arg);
9358		      arglist = tree_cons (NULL_TREE, arg1, arglist);
9359		      return build_function_call_expr (powfn, arglist);
9360		    }
9361		}
9362
9363	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
9364	      if (! optimize_size
9365		  && operand_equal_p (arg0, arg1, 0))
9366		{
9367		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9368
9369		  if (powfn)
9370		    {
9371		      tree arg = build_real (type, dconst2);
9372		      tree arglist = build_tree_list (NULL_TREE, arg);
9373		      arglist = tree_cons (NULL_TREE, arg0, arglist);
9374		      return build_function_call_expr (powfn, arglist);
9375		    }
9376		}
9377	    }
9378	}
9379      goto associate;
9380
9381    case BIT_IOR_EXPR:
9382    bit_ior:
9383      if (integer_all_onesp (arg1))
9384	return omit_one_operand (type, arg1, arg0);
9385      if (integer_zerop (arg1))
9386	return non_lvalue (fold_convert (type, arg0));
9387      if (operand_equal_p (arg0, arg1, 0))
9388	return non_lvalue (fold_convert (type, arg0));
9389
9390      /* ~X | X is -1.  */
9391      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9392	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9393	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9394	{
9395	  t1 = build_int_cst (type, -1);
9396	  t1 = force_fit_type (t1, 0, false, false);
9397	  return omit_one_operand (type, t1, arg1);
9398	}
9399
9400      /* X | ~X is -1.  */
9401      if (TREE_CODE (arg1) == BIT_NOT_EXPR
9402	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9403	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9404	{
9405	  t1 = build_int_cst (type, -1);
9406	  t1 = force_fit_type (t1, 0, false, false);
9407	  return omit_one_operand (type, t1, arg0);
9408	}
9409
9410      /* Canonicalize (X & C1) | C2.  */
9411      if (TREE_CODE (arg0) == BIT_AND_EXPR
9412	  && TREE_CODE (arg1) == INTEGER_CST
9413	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9414	{
9415	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9416	  int width = TYPE_PRECISION (type);
9417	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9418	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9419	  hi2 = TREE_INT_CST_HIGH (arg1);
9420	  lo2 = TREE_INT_CST_LOW (arg1);
9421
9422	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
9423	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9424	    return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9425
9426	  if (width > HOST_BITS_PER_WIDE_INT)
9427	    {
9428	      mhi = (unsigned HOST_WIDE_INT) -1
9429		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
9430	      mlo = -1;
9431	    }
9432	  else
9433	    {
9434	      mhi = 0;
9435	      mlo = (unsigned HOST_WIDE_INT) -1
9436		    >> (HOST_BITS_PER_WIDE_INT - width);
9437	    }
9438
9439	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
9440	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9441	    return fold_build2 (BIT_IOR_EXPR, type,
9442				TREE_OPERAND (arg0, 0), arg1);
9443
9444	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
9445	  hi1 &= mhi;
9446	  lo1 &= mlo;
9447	  if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9448	    return fold_build2 (BIT_IOR_EXPR, type,
9449				fold_build2 (BIT_AND_EXPR, type,
9450					     TREE_OPERAND (arg0, 0),
9451					     build_int_cst_wide (type,
9452								 lo1 & ~lo2,
9453								 hi1 & ~hi2)),
9454				arg1);
9455	}
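
      /* Worked example (illustrative): for "(x & 0xf0) | 0x3c" neither
	 early exit applies, so C1 is minimized to 0xf0 & ~0x3c == 0xc0
	 and the result is "(x & 0xc0) | 0x3c".  */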
9456
9457      /* (X & Y) | Y is (X, Y).  */
9458      if (TREE_CODE (arg0) == BIT_AND_EXPR
9459	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9460	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9461      /* (X & Y) | X is (Y, X).  */
9462      if (TREE_CODE (arg0) == BIT_AND_EXPR
9463	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9464	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9465	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9466      /* X | (X & Y) is (Y, X).  */
9467      if (TREE_CODE (arg1) == BIT_AND_EXPR
9468	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9469	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9470	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9471      /* X | (Y & X) is (Y, X).  */
9472      if (TREE_CODE (arg1) == BIT_AND_EXPR
9473	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9474	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9475	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9476
9477      t1 = distribute_bit_expr (code, type, arg0, arg1);
9478      if (t1 != NULL_TREE)
9479	return t1;
9480
9481      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9482
9483	 This results in more efficient code for machines without a NAND
9484	 instruction.  Combine will canonicalize to the first form
9485	 which will allow use of NAND instructions provided by the
9486	 backend if they exist.  */
9487      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9488	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
9489	{
9490	  return fold_build1 (BIT_NOT_EXPR, type,
9491			      build2 (BIT_AND_EXPR, type,
9492				      TREE_OPERAND (arg0, 0),
9493				      TREE_OPERAND (arg1, 0)));
9494	}
9495
9496      /* See if this can be simplified into a rotate first.  If that
9497	 is unsuccessful continue in the association code.  */
9498      goto bit_rotate;
9499
9500    case BIT_XOR_EXPR:
9501      if (integer_zerop (arg1))
9502	return non_lvalue (fold_convert (type, arg0));
9503      if (integer_all_onesp (arg1))
9504	return fold_build1 (BIT_NOT_EXPR, type, arg0);
9505      if (operand_equal_p (arg0, arg1, 0))
9506	return omit_one_operand (type, integer_zero_node, arg0);
9507
9508      /* ~X ^ X is -1.  */
9509      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9510	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9511	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9512	{
9513	  t1 = build_int_cst (type, -1);
9514	  t1 = force_fit_type (t1, 0, false, false);
9515	  return omit_one_operand (type, t1, arg1);
9516	}
9517
9518      /* X ^ ~X is -1.  */
9519      if (TREE_CODE (arg1) == BIT_NOT_EXPR
9520	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9521	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9522	{
9523	  t1 = build_int_cst (type, -1);
9524	  t1 = force_fit_type (t1, 0, false, false);
9525	  return omit_one_operand (type, t1, arg0);
9526	}
9527
9528      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9529         with a constant, and the two constants have no bits in common,
9530	 we should treat this as a BIT_IOR_EXPR since this may produce more
9531	 simplifications.  */
9532      if (TREE_CODE (arg0) == BIT_AND_EXPR
9533	  && TREE_CODE (arg1) == BIT_AND_EXPR
9534	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9535	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9536	  && integer_zerop (const_binop (BIT_AND_EXPR,
9537					 TREE_OPERAND (arg0, 1),
9538					 TREE_OPERAND (arg1, 1), 0)))
9539	{
9540	  code = BIT_IOR_EXPR;
9541	  goto bit_ior;
9542	}
9543
9544      /* (X | Y) ^ X -> Y & ~X.  */
9545      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9546          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9547        {
9548	  tree t2 = TREE_OPERAND (arg0, 1);
9549	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9550			    arg1);
9551	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9552			    fold_convert (type, t1));
9553	  return t1;
9554	}
9555
9556      /* (Y | X) ^ X -> Y & ~X.  */
9557      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9558          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9559        {
9560	  tree t2 = TREE_OPERAND (arg0, 0);
9561	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9562			    arg1);
9563	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9564			    fold_convert (type, t1));
9565	  return t1;
9566	}
9567
9568      /* X ^ (X | Y) -> Y & ~X.  */
9569      if (TREE_CODE (arg1) == BIT_IOR_EXPR
9570          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9571        {
9572	  tree t2 = TREE_OPERAND (arg1, 1);
9573	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9574			    arg0);
9575	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9576			    fold_convert (type, t1));
9577	  return t1;
9578	}
9579
9580      /* X ^ (Y | X) -> Y & ~X.  */
9581      if (TREE_CODE (arg1) == BIT_IOR_EXPR
9582          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9583        {
9584	  tree t2 = TREE_OPERAND (arg1, 0);
9585	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9586			    arg0);
9587	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9588			    fold_convert (type, t1));
9589	  return t1;
9590	}
9591
9592      /* Convert ~X ^ ~Y to X ^ Y.  */
9593      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9594	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
9595	return fold_build2 (code, type,
9596			    fold_convert (type, TREE_OPERAND (arg0, 0)),
9597			    fold_convert (type, TREE_OPERAND (arg1, 0)));
9598
9599      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
9600      if (TREE_CODE (arg0) == BIT_AND_EXPR
9601	  && integer_onep (TREE_OPERAND (arg0, 1))
9602	  && integer_onep (arg1))
9603	return fold_build2 (EQ_EXPR, type, arg0,
9604			    build_int_cst (TREE_TYPE (arg0), 0));
9605
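      /* Worked example for the fold above (illustrative only): for an
	 int x,

	   (x & 1) ^ 1   is 1 when x is even, 0 when x is odd;
	   (x & 1) == 0  yields the same 1/0 values,

	 e.g. x == 6 gives (0 ^ 1) == 1 and (0 == 0) == 1, while x == 7
	 gives (1 ^ 1) == 0 and (1 == 0) == 0.  */
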
9606      /* Fold (X & Y) ^ Y as ~X & Y.  */
9607      if (TREE_CODE (arg0) == BIT_AND_EXPR
9608	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9609	{
9610	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9611	  return fold_build2 (BIT_AND_EXPR, type,
9612			      fold_build1 (BIT_NOT_EXPR, type, tem),
9613			      fold_convert (type, arg1));
9614	}
9615      /* Fold (X & Y) ^ X as ~Y & X.  */
9616      if (TREE_CODE (arg0) == BIT_AND_EXPR
9617	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9618	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9619	{
9620	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9621	  return fold_build2 (BIT_AND_EXPR, type,
9622			      fold_build1 (BIT_NOT_EXPR, type, tem),
9623			      fold_convert (type, arg1));
9624	}
9625      /* Fold X ^ (X & Y) as X & ~Y.  */
9626      if (TREE_CODE (arg1) == BIT_AND_EXPR
9627	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9628	{
9629	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9630	  return fold_build2 (BIT_AND_EXPR, type,
9631			      fold_convert (type, arg0),
9632			      fold_build1 (BIT_NOT_EXPR, type, tem));
9633	}
9634      /* Fold X ^ (Y & X) as ~Y & X.  */
9635      if (TREE_CODE (arg1) == BIT_AND_EXPR
9636	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9637	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9638	{
9639	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9640	  return fold_build2 (BIT_AND_EXPR, type,
9641			      fold_build1 (BIT_NOT_EXPR, type, tem),
9642			      fold_convert (type, arg0));
9643	}
9644
9645      /* See if this can be simplified into a rotate first.  If that
9646	 is unsuccessful continue in the association code.  */
9647      goto bit_rotate;
9648
9649    case BIT_AND_EXPR:
9650      if (integer_all_onesp (arg1))
9651	return non_lvalue (fold_convert (type, arg0));
9652      if (integer_zerop (arg1))
9653	return omit_one_operand (type, arg1, arg0);
9654      if (operand_equal_p (arg0, arg1, 0))
9655	return non_lvalue (fold_convert (type, arg0));
9656
9657      /* ~X & X is always zero.  */
9658      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9659	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9660	return omit_one_operand (type, integer_zero_node, arg1);
9661
9662      /* X & ~X is always zero.  */
9663      if (TREE_CODE (arg1) == BIT_NOT_EXPR
9664	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9665	return omit_one_operand (type, integer_zero_node, arg0);
9666
9667      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
9668      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9669	  && TREE_CODE (arg1) == INTEGER_CST
9670	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9671	return fold_build2 (BIT_IOR_EXPR, type,
9672			    fold_build2 (BIT_AND_EXPR, type,
9673					 TREE_OPERAND (arg0, 0), arg1),
9674			    fold_build2 (BIT_AND_EXPR, type,
9675					 TREE_OPERAND (arg0, 1), arg1));
9676
9677      /* (X | Y) & Y is (X, Y).  */
9678      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9679	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9680	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9681      /* (X | Y) & X is (Y, X).  */
9682      if (TREE_CODE (arg0) == BIT_IOR_EXPR
9683	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9684	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9685	return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9686      /* X & (X | Y) is (Y, X).  */
9687      if (TREE_CODE (arg1) == BIT_IOR_EXPR
9688	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9689	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9690	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9691      /* X & (Y | X) is (Y, X).  */
9692      if (TREE_CODE (arg1) == BIT_IOR_EXPR
9693	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9694	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9695	return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9696
9697      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
9698      if (TREE_CODE (arg0) == BIT_XOR_EXPR
9699	  && integer_onep (TREE_OPERAND (arg0, 1))
9700	  && integer_onep (arg1))
9701	{
9702	  tem = TREE_OPERAND (arg0, 0);
9703	  return fold_build2 (EQ_EXPR, type,
9704			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9705					   build_int_cst (TREE_TYPE (tem), 1)),
9706			      build_int_cst (TREE_TYPE (tem), 0));
9707	}
9708      /* Fold ~X & 1 as (X & 1) == 0.  */
9709      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9710	  && integer_onep (arg1))
9711	{
9712	  tem = TREE_OPERAND (arg0, 0);
9713	  return fold_build2 (EQ_EXPR, type,
9714			      fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9715					   build_int_cst (TREE_TYPE (tem), 1)),
9716			      build_int_cst (TREE_TYPE (tem), 0));
9717	}
9718
9719      /* Fold (X ^ Y) & Y as ~X & Y.  */
9720      if (TREE_CODE (arg0) == BIT_XOR_EXPR
9721	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9722	{
9723	  tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9724	  return fold_build2 (BIT_AND_EXPR, type,
9725			      fold_build1 (BIT_NOT_EXPR, type, tem),
9726			      fold_convert (type, arg1));
9727	}
9728      /* Fold (X ^ Y) & X as ~Y & X.  */
9729      if (TREE_CODE (arg0) == BIT_XOR_EXPR
9730	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9731	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9732	{
9733	  tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9734	  return fold_build2 (BIT_AND_EXPR, type,
9735			      fold_build1 (BIT_NOT_EXPR, type, tem),
9736			      fold_convert (type, arg1));
9737	}
9738      /* Fold X & (X ^ Y) as X & ~Y.  */
9739      if (TREE_CODE (arg1) == BIT_XOR_EXPR
9740	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9741	{
9742	  tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9743	  return fold_build2 (BIT_AND_EXPR, type,
9744			      fold_convert (type, arg0),
9745			      fold_build1 (BIT_NOT_EXPR, type, tem));
9746	}
9747      /* Fold X & (Y ^ X) as ~Y & X.  */
9748      if (TREE_CODE (arg1) == BIT_XOR_EXPR
9749	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9750	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9751	{
9752	  tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9753	  return fold_build2 (BIT_AND_EXPR, type,
9754			      fold_build1 (BIT_NOT_EXPR, type, tem),
9755			      fold_convert (type, arg0));
9756	}
9757
9758      t1 = distribute_bit_expr (code, type, arg0, arg1);
9759      if (t1 != NULL_TREE)
9760	return t1;
9761      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
9762      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9763	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9764	{
9765	  unsigned int prec
9766	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9767
9768	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9769	      && (~TREE_INT_CST_LOW (arg1)
9770		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9771	    return fold_convert (type, TREE_OPERAND (arg0, 0));
9772	}
9773
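      /* Illustrative example (added, not from the original sources):
	 for an unsigned char c, the mask 0377 (0xFF) covers every bit
	 of the 8-bit source type, so the AND is a no-op:

	   unsigned char c = 0xAB;
	   int v1 = (int) c & 0377;   /* 0xAB */
	   int v2 = (int) c;          /* folded form, also 0xAB */

	 The fold applies whenever ~C has no bits in the low 'prec'
	 positions, i.e. the mask keeps the whole widened value.  */
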
9774      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9775
9776	 This results in more efficient code for machines without a NOR
9777	 instruction.  Combine will canonicalize to the first form
9778	 which will allow use of NOR instructions provided by the
9779	 backend if they exist.  */
9780      if (TREE_CODE (arg0) == BIT_NOT_EXPR
9781	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
9782	{
9783	  return fold_build1 (BIT_NOT_EXPR, type,
9784			      build2 (BIT_IOR_EXPR, type,
9785				      TREE_OPERAND (arg0, 0),
9786				      TREE_OPERAND (arg1, 0)));
9787	}
9788
9789      goto associate;
9790
9791    case RDIV_EXPR:
9792      /* Don't touch a floating-point divide by zero unless the mode
9793	 of the constant can represent infinity.  */
9794      if (TREE_CODE (arg1) == REAL_CST
9795	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9796	  && real_zerop (arg1))
9797	return NULL_TREE;
9798
9799      /* Optimize A / A to 1.0 if we don't care about
9800	 NaNs or Infinities.  Skip the transformation
9801	 for non-real operands.  */
9802      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9803	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9804	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9805	  && operand_equal_p (arg0, arg1, 0))
9806	{
9807	  tree r = build_real (TREE_TYPE (arg0), dconst1);
9808
9809	  return omit_two_operands (type, r, arg0, arg1);
9810	}
9811
9812      /* The complex version of the above A / A optimization.  */
9813      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9814	  && operand_equal_p (arg0, arg1, 0))
9815	{
9816	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9817	  if (! HONOR_NANS (TYPE_MODE (elem_type))
9818	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9819	    {
9820	      tree r = build_real (elem_type, dconst1);
9821	      /* omit_two_operands will call fold_convert for us.  */
9822	      return omit_two_operands (type, r, arg0, arg1);
9823	    }
9824	}
9825
9826      /* (-A) / (-B) -> A / B  */
9827      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9828	return fold_build2 (RDIV_EXPR, type,
9829			    TREE_OPERAND (arg0, 0),
9830			    negate_expr (arg1));
9831      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9832	return fold_build2 (RDIV_EXPR, type,
9833			    negate_expr (arg0),
9834			    TREE_OPERAND (arg1, 0));
9835
9836      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
9837      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9838	  && real_onep (arg1))
9839	return non_lvalue (fold_convert (type, arg0));
9840
9841      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
9842      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9843	  && real_minus_onep (arg1))
9844	return non_lvalue (fold_convert (type, negate_expr (arg0)));
9845
9846      /* If ARG1 is a constant, we can convert this to a multiply by the
9847	 reciprocal.  This does not have the same rounding properties,
9848	 so only do this if -funsafe-math-optimizations.  We can actually
9849	 always safely do it if ARG1 is a power of two, but it's hard to
9850	 tell if it is or not in a portable manner.  */
9851      if (TREE_CODE (arg1) == REAL_CST)
9852	{
9853	  if (flag_unsafe_math_optimizations
9854	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
9855					  arg1, 0)))
9856	    return fold_build2 (MULT_EXPR, type, arg0, tem);
9857	  /* Find the reciprocal if optimizing and the result is exact.  */
9858	  if (optimize)
9859	    {
9860	      REAL_VALUE_TYPE r;
9861	      r = TREE_REAL_CST (arg1);
9862	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
9863		{
9864		  tem = build_real (type, r);
9865		  return fold_build2 (MULT_EXPR, type,
9866				      fold_convert (type, arg0), tem);
9867		}
9868	    }
9869	}
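
      /* Worked example for the reciprocal fold above (illustrative
	 only): 1.0/4.0 is exactly 0.25 in binary floating point, so

	   double a = x / 4.0;
	   double b = x * 0.25;   /* bit-for-bit the same, and cheaper */

	 while 1.0/10.0 is inexact, so x/10.0 is rewritten into a
	 multiply only under -funsafe-math-optimizations.  */
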
9870      /* Convert A/B/C to A/(B*C).  */
9871      if (flag_unsafe_math_optimizations
9872	  && TREE_CODE (arg0) == RDIV_EXPR)
9873	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9874			    fold_build2 (MULT_EXPR, type,
9875					 TREE_OPERAND (arg0, 1), arg1));
9876
9877      /* Convert A/(B/C) to (A/B)*C.  */
9878      if (flag_unsafe_math_optimizations
9879	  && TREE_CODE (arg1) == RDIV_EXPR)
9880	return fold_build2 (MULT_EXPR, type,
9881			    fold_build2 (RDIV_EXPR, type, arg0,
9882					 TREE_OPERAND (arg1, 0)),
9883			    TREE_OPERAND (arg1, 1));
9884
9885      /* Convert C1/(X*C2) into (C1/C2)/X.  */
9886      if (flag_unsafe_math_optimizations
9887	  && TREE_CODE (arg1) == MULT_EXPR
9888	  && TREE_CODE (arg0) == REAL_CST
9889	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9890	{
9891	  tree tem = const_binop (RDIV_EXPR, arg0,
9892				  TREE_OPERAND (arg1, 1), 0);
9893	  if (tem)
9894	    return fold_build2 (RDIV_EXPR, type, tem,
9895				TREE_OPERAND (arg1, 0));
9896	}
9897
9898      if (flag_unsafe_math_optimizations)
9899	{
9900	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9901	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9902
9903	  /* Optimize sin(x)/cos(x) as tan(x).  */
9904	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9905	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9906	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9907	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9908				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9909	    {
9910	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9911
9912	      if (tanfn != NULL_TREE)
9913		return build_function_call_expr (tanfn,
9914						 TREE_OPERAND (arg0, 1));
9915	    }
9916
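	  /* Illustrative example (added, assumes
	     -funsafe-math-optimizations is in effect):

	       double a = sin (x) / cos (x);
	       double b = tan (x);            /* folded form */

	     The two can differ slightly in rounding, which is why the
	     fold is guarded by the unsafe-math flag.  */
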
9917	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
9918	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9919	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9920	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9921	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9922				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9923	    {
9924	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9925
9926	      if (tanfn != NULL_TREE)
9927		{
9928		  tree tmp = TREE_OPERAND (arg0, 1);
9929		  tmp = build_function_call_expr (tanfn, tmp);
9930		  return fold_build2 (RDIV_EXPR, type,
9931				      build_real (type, dconst1), tmp);
9932		}
9933	    }
9934
9935 	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9936	     NaNs or Infinities.  */
9937 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9938 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9939 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9940	    {
9941	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9942	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9943
9944	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9945		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9946		  && operand_equal_p (arg00, arg01, 0))
9947		{
9948		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9949
9950		  if (cosfn != NULL_TREE)
9951		    return build_function_call_expr (cosfn,
9952						     TREE_OPERAND (arg0, 1));
9953		}
9954	    }
9955
9956 	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9957	     NaNs or Infinities.  */
9958 	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9959 	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9960 	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9961	    {
9962	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9963	      tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9964
9965	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9966		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9967		  && operand_equal_p (arg00, arg01, 0))
9968		{
9969		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9970
9971		  if (cosfn != NULL_TREE)
9972		    {
9973		      tree tmp = TREE_OPERAND (arg0, 1);
9974		      tmp = build_function_call_expr (cosfn, tmp);
9975		      return fold_build2 (RDIV_EXPR, type,
9976					  build_real (type, dconst1),
9977					  tmp);
9978		    }
9979		}
9980	    }
9981
9982	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
9983	  if (fcode0 == BUILT_IN_POW
9984	      || fcode0 == BUILT_IN_POWF
9985	      || fcode0 == BUILT_IN_POWL)
9986	    {
9987	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9988	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9989	      if (TREE_CODE (arg01) == REAL_CST
9990		  && ! TREE_CONSTANT_OVERFLOW (arg01)
9991		  && operand_equal_p (arg1, arg00, 0))
9992		{
9993		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9994		  REAL_VALUE_TYPE c;
9995		  tree arg, arglist;
9996
9997		  c = TREE_REAL_CST (arg01);
9998		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9999		  arg = build_real (type, c);
10000		  arglist = build_tree_list (NULL_TREE, arg);
10001		  arglist = tree_cons (NULL_TREE, arg1, arglist);
10002		  return build_function_call_expr (powfn, arglist);
10003		}
10004	    }
10005
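	  /* Worked example for the pow fold above (illustrative only,
	     assumes -funsafe-math-optimizations):

	       double a = pow (x, 3.0) / x;
	       double b = pow (x, 2.0);   /* exponent reduced by one */

	     real_arithmetic folds the new exponent at compile time,
	     here 3.0 - 1.0 == 2.0.  */
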
10006	  /* Optimize x/expN(y) into x*expN(-y).  */
10007	  if (BUILTIN_EXPONENT_P (fcode1))
10008	    {
10009	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10010	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10011	      tree arglist = build_tree_list (NULL_TREE,
10012					      fold_convert (type, arg));
10013	      arg1 = build_function_call_expr (expfn, arglist);
10014	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
10015	    }
10016
10017	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
10018	  if (fcode1 == BUILT_IN_POW
10019	      || fcode1 == BUILT_IN_POWF
10020	      || fcode1 == BUILT_IN_POWL)
10021	    {
10022	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10023	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10024	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10025	      tree neg11 = fold_convert (type, negate_expr (arg11));
10026	      tree arglist = tree_cons (NULL_TREE, arg10,
10027				       build_tree_list (NULL_TREE, neg11));
10028	      arg1 = build_function_call_expr (powfn, arglist);
10029	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
10030	    }
10031	}
10032      return NULL_TREE;
10033
10034    case TRUNC_DIV_EXPR:
10035    case FLOOR_DIV_EXPR:
10036      /* Simplify A / (B << N) where A and B are positive and B is
10037	 a power of 2, to A >> (N + log2(B)).  */
10038      strict_overflow_p = false;
10039      if (TREE_CODE (arg1) == LSHIFT_EXPR
10040	  && (TYPE_UNSIGNED (type)
10041	      || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10042	{
10043	  tree sval = TREE_OPERAND (arg1, 0);
10044	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10045	    {
10046	      tree sh_cnt = TREE_OPERAND (arg1, 1);
10047	      unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10048
10049	      if (strict_overflow_p)
10050		fold_overflow_warning (("assuming signed overflow does not "
10051					"occur when simplifying A / (B << N)"),
10052				       WARN_STRICT_OVERFLOW_MISC);
10053
10054	      sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10055				    sh_cnt, build_int_cst (NULL_TREE, pow2));
10056	      return fold_build2 (RSHIFT_EXPR, type,
10057				  fold_convert (type, arg0), sh_cnt);
10058	    }
10059	}
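
      /* Worked example (illustrative only): for unsigned a and n,

	   unsigned q1 = a / (4u << n);   /* divisor is 4 * 2**n */
	   unsigned q2 = a >> (n + 2);    /* folded: log2 (4) == 2 */

	 e.g. a == 1024, n == 3 gives 1024 / 32 == 32 == 1024 >> 5.  */
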
10060      /* Fall through.  */
10061
10062    case ROUND_DIV_EXPR:
10063    case CEIL_DIV_EXPR:
10064    case EXACT_DIV_EXPR:
10065      if (integer_onep (arg1))
10066	return non_lvalue (fold_convert (type, arg0));
10067      if (integer_zerop (arg1))
10068	return NULL_TREE;
10069      /* X / -1 is -X.  */
10070      if (!TYPE_UNSIGNED (type)
10071	  && TREE_CODE (arg1) == INTEGER_CST
10072	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10073	  && TREE_INT_CST_HIGH (arg1) == -1)
10074	return fold_convert (type, negate_expr (arg0));
10075
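      /* Illustrative example: for a signed int x,

	   int q = x / -1;   /* folded to -x, e.g. 7 / -1 == -7 */

	 so no division instruction is needed.  */
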
10076      /* Convert -A / -B to A / B when the type is signed and overflow is
10077	 undefined.  */
10078      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10079	  && TREE_CODE (arg0) == NEGATE_EXPR
10080	  && negate_expr_p (arg1))
10081	{
10082	  if (INTEGRAL_TYPE_P (type))
10083	    fold_overflow_warning (("assuming signed overflow does not occur "
10084				    "when distributing negation across "
10085				    "division"),
10086				   WARN_STRICT_OVERFLOW_MISC);
10087	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10088			      negate_expr (arg1));
10089	}
10090      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10091	  && TREE_CODE (arg1) == NEGATE_EXPR
10092	  && negate_expr_p (arg0))
10093	{
10094	  if (INTEGRAL_TYPE_P (type))
10095	    fold_overflow_warning (("assuming signed overflow does not occur "
10096				    "when distributing negation across "
10097				    "division"),
10098				   WARN_STRICT_OVERFLOW_MISC);
10099	  return fold_build2 (code, type, negate_expr (arg0),
10100			      TREE_OPERAND (arg1, 0));
10101	}
10102
10103      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10104	 operation, EXACT_DIV_EXPR.
10105
10106	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10107	 At one time the others generated faster code; it's not clear if they
10108	 still do after the last round of changes to the DIV code in expmed.c.  */
10109      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10110	  && multiple_of_p (type, arg0, arg1))
10111	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10112
10113      strict_overflow_p = false;
10114      if (TREE_CODE (arg1) == INTEGER_CST
10115	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10116					 &strict_overflow_p)))
10117	{
10118	  if (strict_overflow_p)
10119	    fold_overflow_warning (("assuming signed overflow does not occur "
10120				    "when simplifying division"),
10121				   WARN_STRICT_OVERFLOW_MISC);
10122	  return fold_convert (type, tem);
10123	}
10124
10125      return NULL_TREE;
10126
10127    case CEIL_MOD_EXPR:
10128    case FLOOR_MOD_EXPR:
10129    case ROUND_MOD_EXPR:
10130    case TRUNC_MOD_EXPR:
10131      /* X % 1 is always zero, but be sure to preserve any side
10132	 effects in X.  */
10133      if (integer_onep (arg1))
10134	return omit_one_operand (type, integer_zero_node, arg0);
10135
10136      /* For X % 0, return X % 0 unchanged so that we get the
10137	 proper warnings and errors.  */
10138      if (integer_zerop (arg1))
10139	return NULL_TREE;
10140
10141      /* 0 % X is always zero, but be sure to preserve any side
10142	 effects in X.  Place this after checking for X == 0.  */
10143      if (integer_zerop (arg0))
10144	return omit_one_operand (type, integer_zero_node, arg1);
10145
10146      /* X % -1 is zero.  */
10147      if (!TYPE_UNSIGNED (type)
10148	  && TREE_CODE (arg1) == INTEGER_CST
10149	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10150	  && TREE_INT_CST_HIGH (arg1) == -1)
10151	return omit_one_operand (type, integer_zero_node, arg0);
10152
10153      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10154         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
10155      strict_overflow_p = false;
10156      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10157	  && (TYPE_UNSIGNED (type)
10158	      || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10159	{
10160	  tree c = arg1;
10161	  /* Also optimize A % (C << N)  where C is a power of 2,
10162	     to A & ((C << N) - 1).  */
10163	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
10164	    c = TREE_OPERAND (arg1, 0);
10165
10166	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10167	    {
10168	      tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
10169				       arg1, integer_one_node);
10170	      if (strict_overflow_p)
10171		fold_overflow_warning (("assuming signed overflow does not "
10172					"occur when simplifying "
10173					"X % (power of two)"),
10174				       WARN_STRICT_OVERFLOW_MISC);
10175	      return fold_build2 (BIT_AND_EXPR, type,
10176				  fold_convert (type, arg0),
10177				  fold_convert (type, mask));
10178	    }
10179	}
10180
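      /* Worked example (illustrative only): with unsigned x,

	   unsigned r1 = x % 8;         /* remainder by a power of two */
	   unsigned r2 = x & (8 - 1);   /* folded form */

	 e.g. x == 29 gives 29 % 8 == 5 and 29 & 7 == 5.  The same holds
	 for 8 << n via the mask (8 << n) - 1.  */
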
10181      /* X % -C is the same as X % C.  */
10182      if (code == TRUNC_MOD_EXPR
10183	  && !TYPE_UNSIGNED (type)
10184	  && TREE_CODE (arg1) == INTEGER_CST
10185	  && !TREE_CONSTANT_OVERFLOW (arg1)
10186	  && TREE_INT_CST_HIGH (arg1) < 0
10187	  && !TYPE_OVERFLOW_TRAPS (type)
10188	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
10189	  && !sign_bit_p (arg1, arg1))
10190	return fold_build2 (code, type, fold_convert (type, arg0),
10191			    fold_convert (type, negate_expr (arg1)));
10192
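      /* Illustrative example for the fold above: with C's truncating
	 division, the sign of the divisor never affects the remainder,
	 e.g.

	    29 % 16 ==  13  and   29 % -16 ==  13,
	   -29 % 16 == -13  and  -29 % -16 == -13,

	 so X % -C can use the positive constant C instead.  */
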
10193      /* X % -Y is the same as X % Y.  */
10194      if (code == TRUNC_MOD_EXPR
10195	  && !TYPE_UNSIGNED (type)
10196	  && TREE_CODE (arg1) == NEGATE_EXPR
10197	  && !TYPE_OVERFLOW_TRAPS (type))
10198	return fold_build2 (code, type, fold_convert (type, arg0),
10199			    fold_convert (type, TREE_OPERAND (arg1, 0)));
10200
10201      if (TREE_CODE (arg1) == INTEGER_CST
10202	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10203					 &strict_overflow_p)))
10204	{
10205	  if (strict_overflow_p)
10206	    fold_overflow_warning (("assuming signed overflow does not occur "
10207				    "when simplifying modulos"),
10208				   WARN_STRICT_OVERFLOW_MISC);
10209	  return fold_convert (type, tem);
10210	}
10211
10212      return NULL_TREE;
10213
10214    case LROTATE_EXPR:
10215    case RROTATE_EXPR:
10216      if (integer_all_onesp (arg0))
10217	return omit_one_operand (type, arg0, arg1);
10218      goto shift;
10219
10220    case RSHIFT_EXPR:
10221      /* Optimize -1 >> x for arithmetic right shifts.  */
10222      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10223	return omit_one_operand (type, arg0, arg1);
10224      /* ... fall through ...  */
10225
10226    case LSHIFT_EXPR:
10227    shift:
10228      if (integer_zerop (arg1))
10229	return non_lvalue (fold_convert (type, arg0));
10230      if (integer_zerop (arg0))
10231	return omit_one_operand (type, arg0, arg1);
10232
10233      /* Since a negative shift count is not well-defined, don't try
10234	 to fold such a shift in the compiler.  */
10235      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10236	return NULL_TREE;
10237
10238      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
10239      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10240	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10241	  && host_integerp (TREE_OPERAND (arg0, 1), false)
10242	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10243	{
10244	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10245			       + TREE_INT_CST_LOW (arg1));
10246
10247	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10248	     being well defined.  */
10249	  if (low >= TYPE_PRECISION (type))
10250	    {
10251	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10252	        low = low % TYPE_PRECISION (type);
10253	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10254	        return build_int_cst (type, 0);
10255	      else
10256		low = TYPE_PRECISION (type) - 1;
10257	    }
10258
10259	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10260			      build_int_cst (type, low));
10261	}
10262
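      /* Worked example (illustrative only): two shifts in the same
	 direction simply add their counts when the sum stays below the
	 type precision:

	   unsigned a = (x >> 3) >> 2;   /* two shift instructions */
	   unsigned b = x >> 5;          /* folded: 3 + 2 == 5 */

	 If the sum reaches the precision, the code above instead clamps
	 (rotates reduce modulo the precision, logical shifts give 0,
	 arithmetic right shifts clamp to precision - 1).  */
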
10263      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10264         into x & ((unsigned)-1 >> c) for unsigned types.  */
10265      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10266           || (TYPE_UNSIGNED (type)
10267	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10268	  && host_integerp (arg1, false)
10269	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10270	  && host_integerp (TREE_OPERAND (arg0, 1), false)
10271	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10272	{
10273	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10274	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10275	  tree lshift;
10276	  tree arg00;
10277
10278	  if (low0 == low1)
10279	    {
10280	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10281
10282	      lshift = build_int_cst (type, -1);
10283	      lshift = int_const_binop (code, lshift, arg1, 0);
10284
10285	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10286	    }
10287	}
10288
10289      /* Rewrite an LROTATE_EXPR by a constant into an
10290	 RROTATE_EXPR by a new constant.  */
10291      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10292	{
10293	  tree tem = build_int_cst (NULL_TREE,
10294				    GET_MODE_BITSIZE (TYPE_MODE (type)));
10295	  tem = fold_convert (TREE_TYPE (arg1), tem);
10296	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10297	  return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10298	}
10299
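      /* Illustrative example (added, assumes a 32-bit type): rotating
	 left by 8 is the same as rotating right by 32 - 8 == 24, e.g.
	 with the usual C rotate idiom

	   unsigned l = (x << 8) | (x >> 24);   /* rotl (x, 8) */
	   unsigned r = (x >> 24) | (x << 8);   /* rotr (x, 24) */

	 both give 0x34567812 for x == 0x12345678.  */
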
10300      /* If we have a rotate of a bit operation with the rotate count and
10301	 the second operand of the bit operation both constant,
10302	 permute the two operations.  */
10303      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10304	  && (TREE_CODE (arg0) == BIT_AND_EXPR
10305	      || TREE_CODE (arg0) == BIT_IOR_EXPR
10306	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
10307	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10308	return fold_build2 (TREE_CODE (arg0), type,
10309			    fold_build2 (code, type,
10310					 TREE_OPERAND (arg0, 0), arg1),
10311			    fold_build2 (code, type,
10312					 TREE_OPERAND (arg0, 1), arg1));
10313
10314      /* Two consecutive rotates adding up to the width of the mode can
10315	 be ignored.  */
10316      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10317	  && TREE_CODE (arg0) == RROTATE_EXPR
10318	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10319	  && TREE_INT_CST_HIGH (arg1) == 0
10320	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10321	  && ((TREE_INT_CST_LOW (arg1)
10322	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10323	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10324	return TREE_OPERAND (arg0, 0);
10325
10326      return NULL_TREE;
10327
10328    case MIN_EXPR:
10329      if (operand_equal_p (arg0, arg1, 0))
10330	return omit_one_operand (type, arg0, arg1);
10331      if (INTEGRAL_TYPE_P (type)
10332	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10333	return omit_one_operand (type, arg1, arg0);
10334      tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10335      if (tem)
10336	return tem;
10337      goto associate;
10338
10339    case MAX_EXPR:
10340      if (operand_equal_p (arg0, arg1, 0))
10341	return omit_one_operand (type, arg0, arg1);
10342      if (INTEGRAL_TYPE_P (type)
10343	  && TYPE_MAX_VALUE (type)
10344	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10345	return omit_one_operand (type, arg1, arg0);
10346      tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10347      if (tem)
10348	return tem;
10349      goto associate;
10350
10351    case TRUTH_ANDIF_EXPR:
10352      /* Note that the operands of this must be ints
10353	 and their values must be 0 or 1.
10354	 ("true" is a fixed value perhaps depending on the language.)  */
10355      /* If first arg is constant zero, return it.  */
10356      if (integer_zerop (arg0))
10357	return fold_convert (type, arg0);
10358    case TRUTH_AND_EXPR:
10359      /* If either arg is constant true, drop it.  */
10360      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10361	return non_lvalue (fold_convert (type, arg1));
10362      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10363	  /* Preserve sequence points.  */
10364	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10365	return non_lvalue (fold_convert (type, arg0));
10366      /* If second arg is constant zero, result is zero, but first arg
10367	 must be evaluated.  */
10368      if (integer_zerop (arg1))
10369	return omit_one_operand (type, arg1, arg0);
10370      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10371	 case will be handled here.  */
10372      if (integer_zerop (arg0))
10373	return omit_one_operand (type, arg0, arg1);
10374
10375      /* !X && X is always false.  */
10376      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10377	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10378	return omit_one_operand (type, integer_zero_node, arg1);
10379      /* X && !X is always false.  */
10380      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10381	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10382	return omit_one_operand (type, integer_zero_node, arg0);
10383
10384      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
10385	 means A >= Y && A != MAX, but in this case we know that
10386	 A < X <= MAX.  */
10387
10388      if (!TREE_SIDE_EFFECTS (arg0)
10389	  && !TREE_SIDE_EFFECTS (arg1))
10390	{
10391	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10392	  if (tem && !operand_equal_p (tem, arg0, 0))
10393	    return fold_build2 (code, type, tem, arg1);
10394
10395	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10396	  if (tem && !operand_equal_p (tem, arg1, 0))
10397	    return fold_build2 (code, type, arg0, tem);
10398	}
10399
10400    truth_andor:
10401      /* We only do these simplifications if we are optimizing.  */
10402      if (!optimize)
10403	return NULL_TREE;
10404
10405      /* Check for things like (A || B) && (A || C).  We can convert this
10406	 to A || (B && C).  Note that either operator can be any of the four
10407	 truth and/or operations and the transformation will still be
10408	 valid.   Also note that we only care about order for the
10409	 ANDIF and ORIF operators.  If B contains side effects, this
10410	 might change the truth-value of A.  */
10411      if (TREE_CODE (arg0) == TREE_CODE (arg1)
10412	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10413	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10414	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
10415	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10416	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10417	{
10418	  tree a00 = TREE_OPERAND (arg0, 0);
10419	  tree a01 = TREE_OPERAND (arg0, 1);
10420	  tree a10 = TREE_OPERAND (arg1, 0);
10421	  tree a11 = TREE_OPERAND (arg1, 1);
10422	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10423			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10424			     && (code == TRUTH_AND_EXPR
10425				 || code == TRUTH_OR_EXPR));
10426
10427	  if (operand_equal_p (a00, a10, 0))
10428	    return fold_build2 (TREE_CODE (arg0), type, a00,
10429				fold_build2 (code, type, a01, a11));
10430	  else if (commutative && operand_equal_p (a00, a11, 0))
10431	    return fold_build2 (TREE_CODE (arg0), type, a00,
10432				fold_build2 (code, type, a01, a10));
10433	  else if (commutative && operand_equal_p (a01, a10, 0))
10434	    return fold_build2 (TREE_CODE (arg0), type, a01,
10435				fold_build2 (code, type, a00, a11));
10436
10437	  /* This case is tricky because we must either have commutative
10438	     operators or else A10 must not have side-effects.  */
10439
10440	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10441		   && operand_equal_p (a01, a11, 0))
10442	    return fold_build2 (TREE_CODE (arg0), type,
10443				fold_build2 (code, type, a00, a10),
10444				a01);
10445	}
10446
10447      /* See if we can build a range comparison.  */
10448      if (0 != (tem = fold_range_test (code, type, op0, op1)))
10449	return tem;
10450
10451      /* Check for the possibility of merging component references.  If our
10452	 lhs is another similar operation, try to merge its rhs with our
10453	 rhs.  Then try to merge our lhs and rhs.  */
10454      if (TREE_CODE (arg0) == code
10455	  && 0 != (tem = fold_truthop (code, type,
10456				       TREE_OPERAND (arg0, 1), arg1)))
10457	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10458
10459      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10460	return tem;
10461
10462      return NULL_TREE;
10463
10464    case TRUTH_ORIF_EXPR:
10465      /* Note that the operands of this must be ints
10466	 and their values must be 0 or 1.
10467	 ("true" is a fixed value perhaps depending on the language.)  */
10468      /* If first arg is constant true, return it.  */
10469      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10470	return fold_convert (type, arg0);
10471    case TRUTH_OR_EXPR:
10472      /* If either arg is constant zero, drop it.  */
10473      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10474	return non_lvalue (fold_convert (type, arg1));
10475      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10476	  /* Preserve sequence points.  */
10477	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10478	return non_lvalue (fold_convert (type, arg0));
10479      /* If second arg is constant true, result is true, but we must
10480	 evaluate first arg.  */
10481      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10482	return omit_one_operand (type, arg1, arg0);
10483      /* Likewise for first arg, but note this only occurs here for
10484	 TRUTH_OR_EXPR.  */
10485      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10486	return omit_one_operand (type, arg0, arg1);
10487
10488      /* !X || X is always true.  */
10489      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10490	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10491	return omit_one_operand (type, integer_one_node, arg1);
10492      /* X || !X is always true.  */
10493      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10494	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10495	return omit_one_operand (type, integer_one_node, arg0);
10496
10497      goto truth_andor;
10498
10499    case TRUTH_XOR_EXPR:
10500      /* If the second arg is constant zero, drop it.  */
10501      if (integer_zerop (arg1))
10502	return non_lvalue (fold_convert (type, arg0));
10503      /* If the second arg is constant true, this is a logical inversion.  */
10504      if (integer_onep (arg1))
10505	{
10506	  /* Only call invert_truthvalue if operand is a truth value.  */
10507	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10508	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10509	  else
10510	    tem = invert_truthvalue (arg0);
10511	  return non_lvalue (fold_convert (type, tem));
10512	}
10513      /* Identical arguments cancel to zero.  */
10514      if (operand_equal_p (arg0, arg1, 0))
10515	return omit_one_operand (type, integer_zero_node, arg0);
10516
10517      /* !X ^ X is always true.  */
10518      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10519	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10520	return omit_one_operand (type, integer_one_node, arg1);
10521
10522      /* X ^ !X is always true.  */
10523      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10524	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10525	return omit_one_operand (type, integer_one_node, arg0);
10526
10527      return NULL_TREE;
10528
10529    case EQ_EXPR:
10530    case NE_EXPR:
10531      tem = fold_comparison (code, type, op0, op1);
10532      if (tem != NULL_TREE)
10533	return tem;
10534
10535      /* bool_var != 0 becomes bool_var. */
10536      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10537          && code == NE_EXPR)
10538        return non_lvalue (fold_convert (type, arg0));
10539
10540      /* bool_var == 1 becomes bool_var. */
10541      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10542          && code == EQ_EXPR)
10543        return non_lvalue (fold_convert (type, arg0));
10544
10545      /* bool_var != 1 becomes !bool_var. */
10546      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10547          && code == NE_EXPR)
10548        return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10549
10550      /* bool_var == 0 becomes !bool_var. */
10551      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10552          && code == EQ_EXPR)
10553        return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10554
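      /* Illustrative summary of the four folds above (added, not from
	 the original sources), for a boolean-typed b:

	   b != 0  becomes  b         b == 1  becomes  b
	   b != 1  becomes  !b        b == 0  becomes  !b

	 so boolean tests need no comparison at all.  */
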
10555      /*  ~a != C becomes a != ~C where C is a constant.  Likewise for ==.  */
10556      if (TREE_CODE (arg0) == BIT_NOT_EXPR
10557	  && TREE_CODE (arg1) == INTEGER_CST)
10558	{
10559	  tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
10560	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10561			      fold_build1 (BIT_NOT_EXPR, cmp_type,
10562					   fold_convert (cmp_type, arg1)));
10563	}
10564
10565      /* If this is an equality comparison of the address of a non-weak
10566	 object against zero, then we know the result.  */
10567      if (TREE_CODE (arg0) == ADDR_EXPR
10568	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10569	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10570	  && integer_zerop (arg1))
10571	return constant_boolean_node (code != EQ_EXPR, type);
10572
10573      /* If this is an equality comparison of the address of two non-weak,
10574	 unaliased symbols, neither of which is extern (since we do not
10575	 have access to attributes for externs), then we know the result.  */
10576      if (TREE_CODE (arg0) == ADDR_EXPR
10577	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10578	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10579	  && ! lookup_attribute ("alias",
10580				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10581	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10582	  && TREE_CODE (arg1) == ADDR_EXPR
10583	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10584	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10585	  && ! lookup_attribute ("alias",
10586				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10587	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10588	{
10589	  /* We know that we're looking at the address of two
10590	     non-weak, unaliased, static _DECL nodes.
10591
10592	     It is both wasteful and incorrect to call operand_equal_p
10593	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
10594	     all we need to do is test pointer equality for the arguments
10595	     to the two ADDR_EXPR nodes.  It is incorrect to use
10596	     operand_equal_p as that function is NOT equivalent to a
10597	     C equality test.  It can in fact return false for two
10598	     objects which would test as equal using the C equality
10599	     operator.  */
10600	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10601	  return constant_boolean_node (equal
10602				        ? code == EQ_EXPR : code != EQ_EXPR,
10603				        type);
10604	}
10605
10606      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10607	 a MINUS_EXPR of a constant, we can convert it into a comparison with
10608	 a revised constant as long as no overflow occurs.  */
10609      if (TREE_CODE (arg1) == INTEGER_CST
10610	  && (TREE_CODE (arg0) == PLUS_EXPR
10611	      || TREE_CODE (arg0) == MINUS_EXPR)
10612	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10613	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10614				      ? MINUS_EXPR : PLUS_EXPR,
10615				      fold_convert (TREE_TYPE (arg0), arg1),
10616				      TREE_OPERAND (arg0, 1), 0))
10617	  && ! TREE_CONSTANT_OVERFLOW (tem))
10618	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10619
10620      /* Similarly for a NEGATE_EXPR.  */
10621      if (TREE_CODE (arg0) == NEGATE_EXPR
10622	  && TREE_CODE (arg1) == INTEGER_CST
10623	  && 0 != (tem = negate_expr (arg1))
10624	  && TREE_CODE (tem) == INTEGER_CST
10625	  && ! TREE_CONSTANT_OVERFLOW (tem))
10626	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10627
10628      /* If we have X - Y == 0, we can convert that to X == Y and similarly
10629	 for !=.  Don't do this for ordered comparisons due to overflow.  */
10630      if (TREE_CODE (arg0) == MINUS_EXPR
10631	  && integer_zerop (arg1))
10632	return fold_build2 (code, type,
10633			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10634
10635      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
10636      if (TREE_CODE (arg0) == ABS_EXPR
10637	  && (integer_zerop (arg1) || real_zerop (arg1)))
10638	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10639
10640      /* If this is an EQ or NE comparison with zero and ARG0 is
10641	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10642	 two operations, but the latter can be done in one less insn
10643	 on machines that have only two-operand insns or on which a
10644	 constant cannot be the first operand.  */
10645      if (TREE_CODE (arg0) == BIT_AND_EXPR
10646	  && integer_zerop (arg1))
10647	{
10648	  tree arg00 = TREE_OPERAND (arg0, 0);
10649	  tree arg01 = TREE_OPERAND (arg0, 1);
10650	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10651	      && integer_onep (TREE_OPERAND (arg00, 0)))
10652	    {
10653	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10654				      arg01, TREE_OPERAND (arg00, 1));
10655	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10656				 build_int_cst (TREE_TYPE (arg0), 1));
10657	      return fold_build2 (code, type,
10658				  fold_convert (TREE_TYPE (arg1), tem), arg1);
10659	    }
10660	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
10661		   && integer_onep (TREE_OPERAND (arg01, 0)))
10662	    {
10663	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10664				      arg00, TREE_OPERAND (arg01, 1));
10665	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10666				 build_int_cst (TREE_TYPE (arg0), 1));
10667	      return fold_build2 (code, type,
10668				  fold_convert (TREE_TYPE (arg1), tem), arg1);
10669	    }
10670	}
10671
10672      /* If this is an NE or EQ comparison of zero against the result of a
10673	 signed MOD operation whose second operand is a power of 2, make
10674	 the MOD operation unsigned since it is simpler and equivalent.  */
10675      if (integer_zerop (arg1)
10676	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10677	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10678	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
10679	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10680	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10681	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10682	{
10683	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10684	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10685				     fold_convert (newtype,
10686						   TREE_OPERAND (arg0, 0)),
10687				     fold_convert (newtype,
10688						   TREE_OPERAND (arg0, 1)));
10689
10690	  return fold_build2 (code, type, newmod,
10691			      fold_convert (newtype, arg1));
10692	}
10693
10694      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10695	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10696	 a single bit.  */
10697      if (TREE_CODE (arg0) == BIT_AND_EXPR
10698	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10699	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10700	     == INTEGER_CST
10701	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10702	  && integer_zerop (arg1))
10703	{
10704	  tree itype = TREE_TYPE (arg0);
10705	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10706	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10707
10708	  /* Check for a valid shift count.  */
10709	  if (TREE_INT_CST_HIGH (arg001) == 0
10710	      && TREE_INT_CST_LOW (arg001) < prec)
10711	    {
10712	      tree arg01 = TREE_OPERAND (arg0, 1);
10713	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10714	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10715	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10716		 can be rewritten as (X & (C2 << C1)) != 0.  */
10717	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10718		{
10719		  tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10720		  tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10721		  return fold_build2 (code, type, tem, arg1);
10722		}
10723	      /* Otherwise, for signed (arithmetic) shifts,
10724		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10725		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10726	      else if (!TYPE_UNSIGNED (itype))
10727		return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10728				    arg000, build_int_cst (itype, 0));
10729	      /* Otherwise, for unsigned (logical) shifts,
10730		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10731		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10732	      else
10733		return omit_one_operand (type,
10734					 code == EQ_EXPR ? integer_one_node
10735							 : integer_zero_node,
10736					 arg000);
10737	    }
10738	}
10739
10740      /* If this is an NE comparison of zero with an AND of one, remove the
10741	 comparison since the AND will give the correct value.  */
10742      if (code == NE_EXPR
10743	  && integer_zerop (arg1)
10744	  && TREE_CODE (arg0) == BIT_AND_EXPR
10745	  && integer_onep (TREE_OPERAND (arg0, 1)))
10746	return fold_convert (type, arg0);
10747
10748      /* If we have (A & C) == C where C is a power of 2, convert this into
10749	 (A & C) != 0.  Similarly for NE_EXPR.  */
10750      if (TREE_CODE (arg0) == BIT_AND_EXPR
10751	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10752	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10753	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10754			    arg0, fold_convert (TREE_TYPE (arg0),
10755						integer_zero_node));
10756
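      /* Worked example (illustrative only): with C == 4, a power of
	 two, (a & 4) can only be 0 or 4, hence

	   int t1 = (a & 4) == 4;
	   int t2 = (a & 4) != 0;   /* folded form, same truth value */

	 agree for every a.  */
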
10757      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10758	 bit, then fold the expression into A < 0 or A >= 0.  */
10759      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10760      if (tem)
10761	return tem;
10762
10763      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10764	 Similarly for NE_EXPR.  */
10765      if (TREE_CODE (arg0) == BIT_AND_EXPR
10766	  && TREE_CODE (arg1) == INTEGER_CST
10767	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10768	{
10769	  tree notc = fold_build1 (BIT_NOT_EXPR,
10770				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
10771				   TREE_OPERAND (arg0, 1));
10772	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10773				       arg1, notc);
10774	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10775	  if (integer_nonzerop (dandnotc))
10776	    return omit_one_operand (type, rslt, arg0);
10777	}
10778
10779      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10780	 Similarly for NE_EXPR.  */
10781      if (TREE_CODE (arg0) == BIT_IOR_EXPR
10782	  && TREE_CODE (arg1) == INTEGER_CST
10783	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10784	{
10785	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10786	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10787				       TREE_OPERAND (arg0, 1), notd);
10788	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10789	  if (integer_nonzerop (candnotd))
10790	    return omit_one_operand (type, rslt, arg0);
10791	}
10792
10793      /* If this is a comparison of a field, we may be able to simplify it.  */
10794      if (((TREE_CODE (arg0) == COMPONENT_REF
10795	    && lang_hooks.can_use_bit_fields_p ())
10796	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10797	  /* Handle the constant case even without -O
10798	     to make sure the warnings are given.  */
10799	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10800	{
10801	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10802	  if (t1)
10803	    return t1;
10804	}
10805
10806      /* Optimize comparisons of strlen vs zero to a compare of the
10807	 first character of the string vs zero.  To wit,
10808		strlen(ptr) == 0   =>  *ptr == 0
10809		strlen(ptr) != 0   =>  *ptr != 0
10810	 Other cases should reduce to one of these two (or a constant)
10811	 due to the return value of strlen being unsigned.  */
10812      if (TREE_CODE (arg0) == CALL_EXPR
10813	  && integer_zerop (arg1))
10814	{
10815	  tree fndecl = get_callee_fndecl (arg0);
10816	  tree arglist;
10817
10818	  if (fndecl
10819	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10820	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10821	      && (arglist = TREE_OPERAND (arg0, 1))
10822	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10823	      && ! TREE_CHAIN (arglist))
10824	    {
10825	      tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10826	      return fold_build2 (code, type, iref,
10827				  build_int_cst (TREE_TYPE (iref), 0));
10828	    }
10829	}
10830
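
      /* Illustrative example (added, not from the original sources):
	 because strlen returns 0 exactly when the first character is
	 NUL,

	   strlen (p) == 0   becomes   *p == 0
	   strlen (p) != 0   becomes   *p != 0

	 e.g. for p == "abc" both forms of the first test yield 0.  */
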
10831      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10832	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
10833      if (TREE_CODE (arg0) == RSHIFT_EXPR
10834	  && integer_zerop (arg1)
10835	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10836	{
10837	  tree arg00 = TREE_OPERAND (arg0, 0);
10838	  tree arg01 = TREE_OPERAND (arg0, 1);
10839	  tree itype = TREE_TYPE (arg00);
10840	  if (TREE_INT_CST_HIGH (arg01) == 0
10841	      && TREE_INT_CST_LOW (arg01)
10842		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10843	    {
10844	      if (TYPE_UNSIGNED (itype))
10845		{
10846		  itype = lang_hooks.types.signed_type (itype);
10847		  arg00 = fold_convert (itype, arg00);
10848		}
10849	      return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10850				  type, arg00, build_int_cst (itype, 0));
10851	    }
10852	}
10853
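      /* Illustrative sketch (added): here C equals the precision minus
	 one, so the shift leaves only the sign bit.  Assuming a 32-bit
	 int (an unsigned operand is converted to signed first, making
	 the shift arithmetic):

	   int t1 = (x >> 31) != 0;   /* 31 == precision - 1 */
	   int t2 = x < 0;            /* folded form */

	 e.g. x == -5 gives t1 == t2 == 1, while x == 5 gives 0.  */
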
10854      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
10855      if (integer_zerop (arg1)
10856	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
10857	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10858			    TREE_OPERAND (arg0, 1));
10859
10860      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
10861      if (TREE_CODE (arg0) == BIT_XOR_EXPR
10862	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10863	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10864			    build_int_cst (TREE_TYPE (arg1), 0));
10865      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
10866      if (TREE_CODE (arg0) == BIT_XOR_EXPR
10867	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10868	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10869	return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10870			    build_int_cst (TREE_TYPE (arg1), 0));
10871
10872      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
10873      if (TREE_CODE (arg0) == BIT_XOR_EXPR
10874	  && TREE_CODE (arg1) == INTEGER_CST
10875	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10876	return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10877			    fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10878					 TREE_OPERAND (arg0, 1), arg1));
10879
10880      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10881	 (X & C) == 0 when C is a single bit.  */
10882      if (TREE_CODE (arg0) == BIT_AND_EXPR
10883	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10884	  && integer_zerop (arg1)
10885	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10886	{
10887	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10888			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10889			     TREE_OPERAND (arg0, 1));
10890	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10891			      type, tem, arg1);
10892	}
10893
10894      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10895	 constant C is a power of two, i.e. a single bit.  */
10896      if (TREE_CODE (arg0) == BIT_XOR_EXPR
10897	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10898	  && integer_zerop (arg1)
10899	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10900	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10901			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10902	{
10903	  tree arg00 = TREE_OPERAND (arg0, 0);
10904	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10905			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10906	}
10907
10908      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10909	 when C is a power of two, i.e. a single bit.  */
10910      if (TREE_CODE (arg0) == BIT_AND_EXPR
10911	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10912	  && integer_zerop (arg1)
10913	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10914	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10915			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10916	{
10917	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10918	  tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10919			     arg000, TREE_OPERAND (arg0, 1));
10920	  return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10921			      tem, build_int_cst (TREE_TYPE (tem), 0));
10922	}
10923
10924      if (integer_zerop (arg1)
10925	  && tree_expr_nonzero_p (arg0))
10926        {
10927	  tree res = constant_boolean_node (code == NE_EXPR, type);
10928	  return omit_one_operand (type, res, arg0);
10929	}
10930      return NULL_TREE;
10931
10932    case LT_EXPR:
10933    case GT_EXPR:
10934    case LE_EXPR:
10935    case GE_EXPR:
10936      tem = fold_comparison (code, type, op0, op1);
10937      if (tem != NULL_TREE)
10938	return tem;
10939
10940      /* Transform comparisons of the form X +- C CMP X.  */
10941      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10942	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10943	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10944	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10945	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10946		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
10947	{
10948	  tree arg01 = TREE_OPERAND (arg0, 1);
10949	  enum tree_code code0 = TREE_CODE (arg0);
10950	  int is_positive;
10951
10952	  if (TREE_CODE (arg01) == REAL_CST)
10953	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10954	  else
10955	    is_positive = tree_int_cst_sgn (arg01);
10956
10957	  /* (X - c) > X becomes false.  */
10958	  if (code == GT_EXPR
10959	      && ((code0 == MINUS_EXPR && is_positive >= 0)
10960		  || (code0 == PLUS_EXPR && is_positive <= 0)))
10961	    {
10962	      if (TREE_CODE (arg01) == INTEGER_CST
10963		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10964		fold_overflow_warning (("assuming signed overflow does not "
10965					"occur when assuming that (X - c) > X "
10966					"is always false"),
10967				       WARN_STRICT_OVERFLOW_ALL);
10968	      return constant_boolean_node (0, type);
10969	    }
10970
10971	  /* Likewise (X + c) < X becomes false.  */
10972	  if (code == LT_EXPR
10973	      && ((code0 == PLUS_EXPR && is_positive >= 0)
10974		  || (code0 == MINUS_EXPR && is_positive <= 0)))
10975	    {
10976	      if (TREE_CODE (arg01) == INTEGER_CST
10977		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10978		fold_overflow_warning (("assuming signed overflow does not "
10979					"occur when assuming that "
10980					"(X + c) < X is always false"),
10981				       WARN_STRICT_OVERFLOW_ALL);
10982	      return constant_boolean_node (0, type);
10983	    }
10984
10985	  /* Convert (X - c) <= X to true.  */
10986	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10987	      && code == LE_EXPR
10988	      && ((code0 == MINUS_EXPR && is_positive >= 0)
10989		  || (code0 == PLUS_EXPR && is_positive <= 0)))
10990	    {
10991	      if (TREE_CODE (arg01) == INTEGER_CST
10992		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
10993		fold_overflow_warning (("assuming signed overflow does not "
10994					"occur when assuming that "
10995					"(X - c) <= X is always true"),
10996				       WARN_STRICT_OVERFLOW_ALL);
10997	      return constant_boolean_node (1, type);
10998	    }
10999
11000	  /* Convert (X + c) >= X to true.  */
11001	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11002	      && code == GE_EXPR
11003	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11004		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11005	    {
11006	      if (TREE_CODE (arg01) == INTEGER_CST
11007		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11008		fold_overflow_warning (("assuming signed overflow does not "
11009					"occur when assuming that "
11010					"(X + c) >= X is always true"),
11011				       WARN_STRICT_OVERFLOW_ALL);
11012	      return constant_boolean_node (1, type);
11013	    }
11014
11015	  if (TREE_CODE (arg01) == INTEGER_CST)
11016	    {
11017	      /* Convert X + c > X and X - c < X to true for integers.  */
11018	      if (code == GT_EXPR
11019	          && ((code0 == PLUS_EXPR && is_positive > 0)
11020		      || (code0 == MINUS_EXPR && is_positive < 0)))
11021		{
11022		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11023		    fold_overflow_warning (("assuming signed overflow does "
11024					    "not occur when assuming that "
11025					    "(X + c) > X is always true"),
11026					   WARN_STRICT_OVERFLOW_ALL);
11027		  return constant_boolean_node (1, type);
11028		}
11029
11030	      if (code == LT_EXPR
11031	          && ((code0 == MINUS_EXPR && is_positive > 0)
11032		      || (code0 == PLUS_EXPR && is_positive < 0)))
11033		{
11034		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11035		    fold_overflow_warning (("assuming signed overflow does "
11036					    "not occur when assuming that "
11037					    "(X - c) < X is always true"),
11038					   WARN_STRICT_OVERFLOW_ALL);
11039		  return constant_boolean_node (1, type);
11040		}
11041
11042	      /* Convert X + c <= X and X - c >= X to false for integers.  */
11043	      if (code == LE_EXPR
11044	          && ((code0 == PLUS_EXPR && is_positive > 0)
11045		      || (code0 == MINUS_EXPR && is_positive < 0)))
11046		{
11047		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11048		    fold_overflow_warning (("assuming signed overflow does "
11049					    "not occur when assuming that "
11050					    "(X + c) <= X is always false"),
11051					   WARN_STRICT_OVERFLOW_ALL);
11052		  return constant_boolean_node (0, type);
11053		}
11054
11055	      if (code == GE_EXPR
11056	          && ((code0 == MINUS_EXPR && is_positive > 0)
11057		      || (code0 == PLUS_EXPR && is_positive < 0)))
11058		{
11059		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11060		    fold_overflow_warning (("assuming signed overflow does "
11061					    "not occur when assuming that "
11062					    "(X - c) >= X is always false"),
11063					   WARN_STRICT_OVERFLOW_ALL);
11064		  return constant_boolean_node (0, type);
11065		}
11066	    }
11067	}
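      /* Illustrative note: under the undefined-overflow assumption the
	 rules above fold e.g. "x + 1 > x" to true and "x - 1 > x" to
	 false for signed x, since the arithmetic is assumed not to
	 wrap; the fold_overflow_warning calls report that assumption.  */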
11068
11069      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11070	 This transformation affects the cases which are handled in later
11071	 optimizations involving comparisons with non-negative constants.  */
11072      if (TREE_CODE (arg1) == INTEGER_CST
11073	  && TREE_CODE (arg0) != INTEGER_CST
11074	  && tree_int_cst_sgn (arg1) > 0)
11075	{
11076	  if (code == GE_EXPR)
11077	    {
11078	      arg1 = const_binop (MINUS_EXPR, arg1,
11079			          build_int_cst (TREE_TYPE (arg1), 1), 0);
11080	      return fold_build2 (GT_EXPR, type, arg0,
11081				  fold_convert (TREE_TYPE (arg0), arg1));
11082	    }
11083	  if (code == LT_EXPR)
11084	    {
11085	      arg1 = const_binop (MINUS_EXPR, arg1,
11086			          build_int_cst (TREE_TYPE (arg1), 1), 0);
11087	      return fold_build2 (LE_EXPR, type, arg0,
11088				  fold_convert (TREE_TYPE (arg0), arg1));
11089	    }
11090	}
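      /* Illustrative note: for integer X, "x >= 5" is equivalent to
	 "x > 4" and "x < 5" to "x <= 4"; canonicalizing on one form
	 lets the extreme-value cases below match more often.  */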
11091
11092      /* Comparisons with the highest or lowest possible integer of
11093	 the specified size will have known values.  */
11094      {
11095	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
11096
11097	if (TREE_CODE (arg1) == INTEGER_CST
11098	    && ! TREE_CONSTANT_OVERFLOW (arg1)
11099	    && width <= 2 * HOST_BITS_PER_WIDE_INT
11100	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11101		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
11102	  {
11103	    HOST_WIDE_INT signed_max_hi;
11104	    unsigned HOST_WIDE_INT signed_max_lo;
11105	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11106
11107	    if (width <= HOST_BITS_PER_WIDE_INT)
11108	      {
11109		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11110				- 1;
11111		signed_max_hi = 0;
11112		max_hi = 0;
11113
11114		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11115		  {
11116		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11117		    min_lo = 0;
11118		    min_hi = 0;
11119		  }
11120		else
11121		  {
11122		    max_lo = signed_max_lo;
11123		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11124		    min_hi = -1;
11125		  }
11126	      }
11127	    else
11128	      {
11129		width -= HOST_BITS_PER_WIDE_INT;
11130		signed_max_lo = -1;
11131		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11132				- 1;
11133		max_lo = -1;
11134		min_lo = 0;
11135
11136		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
11137		  {
11138		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11139		    min_hi = 0;
11140		  }
11141		else
11142		  {
11143		    max_hi = signed_max_hi;
11144		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11145		  }
11146	      }
11147
11148	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11149		&& TREE_INT_CST_LOW (arg1) == max_lo)
11150	      switch (code)
11151		{
11152		case GT_EXPR:
11153		  return omit_one_operand (type, integer_zero_node, arg0);
11154
11155		case GE_EXPR:
11156		  return fold_build2 (EQ_EXPR, type, op0, op1);
11157
11158		case LE_EXPR:
11159		  return omit_one_operand (type, integer_one_node, arg0);
11160
11161		case LT_EXPR:
11162		  return fold_build2 (NE_EXPR, type, op0, op1);
11163
11164		/* The GE_EXPR and LT_EXPR cases above are not normally
11165		   reached because of previous transformations.  */
11166
11167		default:
11168		  break;
11169		}
11170	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11171		     == max_hi
11172		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11173	      switch (code)
11174		{
11175		case GT_EXPR:
11176		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11177		  return fold_build2 (EQ_EXPR, type,
11178				      fold_convert (TREE_TYPE (arg1), arg0),
11179				      arg1);
11180		case LE_EXPR:
11181		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
11182		  return fold_build2 (NE_EXPR, type,
11183				      fold_convert (TREE_TYPE (arg1), arg0),
11184				      arg1);
11185		default:
11186		  break;
11187		}
11188	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11189		     == min_hi
11190		     && TREE_INT_CST_LOW (arg1) == min_lo)
11191	      switch (code)
11192		{
11193		case LT_EXPR:
11194		  return omit_one_operand (type, integer_zero_node, arg0);
11195
11196		case LE_EXPR:
11197		  return fold_build2 (EQ_EXPR, type, op0, op1);
11198
11199		case GE_EXPR:
11200		  return omit_one_operand (type, integer_one_node, arg0);
11201
11202		case GT_EXPR:
11203		  return fold_build2 (NE_EXPR, type, op0, op1);
11204
11205		default:
11206		  break;
11207		}
11208	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11209		     == min_hi
11210		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11211	      switch (code)
11212		{
11213		case GE_EXPR:
11214		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11215		  return fold_build2 (NE_EXPR, type,
11216				      fold_convert (TREE_TYPE (arg1), arg0),
11217				      arg1);
11218		case LT_EXPR:
11219		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11220		  return fold_build2 (EQ_EXPR, type,
11221				      fold_convert (TREE_TYPE (arg1), arg0),
11222				      arg1);
11223		default:
11224		  break;
11225		}
11226
11227	    else if (!in_gimple_form
11228		     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
11229		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
11230		     && TYPE_UNSIGNED (TREE_TYPE (arg1))
11231		     /* signed_type does not work on pointer types.  */
11232		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
11233	      {
11234		/* The following case also applies to X < signed_max+1
11235		   and X >= signed_max+1 because of previous transformations.  */
11236		if (code == LE_EXPR || code == GT_EXPR)
11237		  {
11238		    tree st;
11239		    st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11240		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
11241					type, fold_convert (st, arg0),
11242					build_int_cst (st, 0));
11243		  }
11244	      }
11245	  }
11246      }
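      /* Illustrative note: for "unsigned char x" (maximum 255) the
	 block above folds "x > 255" to false, "x <= 255" to true,
	 "x >= 255" to "x == 255", and "x > 254" to "x == 255".  */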
11247
11248      /* If we are comparing an ABS_EXPR with a constant, we can
11249	 convert all the cases into explicit comparisons, but they may
11250	 well not be faster than doing the ABS and one comparison.
11251	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11252	 and a comparison, and is probably faster.  */
11253      if (code == LE_EXPR
11254	  && TREE_CODE (arg1) == INTEGER_CST
11255	  && TREE_CODE (arg0) == ABS_EXPR
11256	  && ! TREE_SIDE_EFFECTS (arg0)
11257	  && (0 != (tem = negate_expr (arg1)))
11258	  && TREE_CODE (tem) == INTEGER_CST
11259	  && ! TREE_CONSTANT_OVERFLOW (tem))
11260	return fold_build2 (TRUTH_ANDIF_EXPR, type,
11261			    build2 (GE_EXPR, type,
11262				    TREE_OPERAND (arg0, 0), tem),
11263			    build2 (LE_EXPR, type,
11264				    TREE_OPERAND (arg0, 0), arg1));
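      /* Illustrative note: "abs (x) <= 5" becomes "x >= -5 && x <= 5",
	 which later range-comparison folding can reduce to a single
	 unsigned test such as "(unsigned) (x + 5) <= 10".  */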
11265
11266      /* Convert ABS_EXPR<x> >= 0 to true.  */
11267      strict_overflow_p = false;
11268      if (code == GE_EXPR
11269	  && (integer_zerop (arg1)
11270	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11271		  && real_zerop (arg1)))
11272	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11273	{
11274	  if (strict_overflow_p)
11275	    fold_overflow_warning (("assuming signed overflow does not occur "
11276				    "when simplifying comparison of "
11277				    "absolute value and zero"),
11278				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11279	  return omit_one_operand (type, integer_one_node, arg0);
11280	}
11281
11282      /* Convert ABS_EXPR<x> < 0 to false.  */
11283      strict_overflow_p = false;
11284      if (code == LT_EXPR
11285	  && (integer_zerop (arg1) || real_zerop (arg1))
11286	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11287	{
11288	  if (strict_overflow_p)
11289	    fold_overflow_warning (("assuming signed overflow does not occur "
11290				    "when simplifying comparison of "
11291				    "absolute value and zero"),
11292				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11293	  return omit_one_operand (type, integer_zero_node, arg0);
11294	}
11295
11296      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11297	 and similarly for >= into !=.  */
11298      if ((code == LT_EXPR || code == GE_EXPR)
11299	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11300	  && TREE_CODE (arg1) == LSHIFT_EXPR
11301	  && integer_onep (TREE_OPERAND (arg1, 0)))
11302	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11303		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11304			       TREE_OPERAND (arg1, 1)),
11305		       build_int_cst (TREE_TYPE (arg0), 0));
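      /* Illustrative note: for unsigned x, "x < (1 << y)" says every
	 bit of x at position y or above is clear, i.e. "(x >> y) == 0";
	 e.g. with y == 3, x < 8 exactly when x >> 3 == 0.  */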
11306
11307      if ((code == LT_EXPR || code == GE_EXPR)
11308	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11309	  && (TREE_CODE (arg1) == NOP_EXPR
11310	      || TREE_CODE (arg1) == CONVERT_EXPR)
11311	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11312	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11313	return
11314	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11315		  fold_convert (TREE_TYPE (arg0),
11316				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11317					TREE_OPERAND (TREE_OPERAND (arg1, 0),
11318						      1))),
11319		  build_int_cst (TREE_TYPE (arg0), 0));
11320
11321      return NULL_TREE;
11322
11323    case UNORDERED_EXPR:
11324    case ORDERED_EXPR:
11325    case UNLT_EXPR:
11326    case UNLE_EXPR:
11327    case UNGT_EXPR:
11328    case UNGE_EXPR:
11329    case UNEQ_EXPR:
11330    case LTGT_EXPR:
11331      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11332	{
11333	  t1 = fold_relational_const (code, type, arg0, arg1);
11334	  if (t1 != NULL_TREE)
11335	    return t1;
11336	}
11337
11338      /* If the first operand is NaN, the result is constant.  */
11339      if (TREE_CODE (arg0) == REAL_CST
11340	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11341	  && (code != LTGT_EXPR || ! flag_trapping_math))
11342	{
11343	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11344	       ? integer_zero_node
11345	       : integer_one_node;
11346	  return omit_one_operand (type, t1, arg1);
11347	}
11348
11349      /* If the second operand is NaN, the result is constant.  */
11350      if (TREE_CODE (arg1) == REAL_CST
11351	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11352	  && (code != LTGT_EXPR || ! flag_trapping_math))
11353	{
11354	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11355	       ? integer_zero_node
11356	       : integer_one_node;
11357	  return omit_one_operand (type, t1, arg0);
11358	}
11359
11360      /* Simplify unordered comparison of something with itself.  */
11361      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11362	  && operand_equal_p (arg0, arg1, 0))
11363	return constant_boolean_node (1, type);
11364
11365      if (code == LTGT_EXPR
11366	  && !flag_trapping_math
11367	  && operand_equal_p (arg0, arg1, 0))
11368	return constant_boolean_node (0, type);
11369
11370      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11371      {
11372	tree targ0 = strip_float_extensions (arg0);
11373	tree targ1 = strip_float_extensions (arg1);
11374	tree newtype = TREE_TYPE (targ0);
11375
11376	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11377	  newtype = TREE_TYPE (targ1);
11378
11379	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11380	  return fold_build2 (code, type, fold_convert (newtype, targ0),
11381			      fold_convert (newtype, targ1));
11382      }
11383
11384      return NULL_TREE;
11385
11386    case COMPOUND_EXPR:
11387      /* When pedantic, a compound expression can be neither an lvalue
11388	 nor an integer constant expression.  */
11389      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11390	return NULL_TREE;
11391      /* Don't let (0, 0) be a null pointer constant.  */
11392      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11393				 : fold_convert (type, arg1);
11394      return pedantic_non_lvalue (tem);
11395
11396    case COMPLEX_EXPR:
11397      if ((TREE_CODE (arg0) == REAL_CST
11398	   && TREE_CODE (arg1) == REAL_CST)
11399	  || (TREE_CODE (arg0) == INTEGER_CST
11400	      && TREE_CODE (arg1) == INTEGER_CST))
11401	return build_complex (type, arg0, arg1);
11402      return NULL_TREE;
11403
11404    case ASSERT_EXPR:
11405      /* An ASSERT_EXPR should never be passed to fold_binary.  */
11406      gcc_unreachable ();
11407
11408    default:
11409      return NULL_TREE;
11410    } /* switch (code) */
11411}
11412
11413/* Callback for walk_tree, looking for a LABEL_EXPR.  Returns *TP if it
11414   is a LABEL_EXPR; otherwise returns NULL_TREE.  Does not walk into the
11415   sub-tree of a GOTO_EXPR.  */
11416
11417static tree
11418contains_label_1 (tree *tp,
11419                  int *walk_subtrees,
11420                  void *data ATTRIBUTE_UNUSED)
11421{
11422  switch (TREE_CODE (*tp))
11423    {
11424    case LABEL_EXPR:
11425      return *tp;
11426    case GOTO_EXPR:
11427      *walk_subtrees = 0;
11428    /* FALLTHRU */
11429    default:
11430      return NULL_TREE;
11431    }
11432}
11433
11434/* Check whether the sub-tree ST contains a label (a LABEL_EXPR) which
11435   is accessible from outside the sub-tree.  Returns false if no such
11436   label is found.  */
11437
11438static bool
11439contains_label_p (tree st)
11440{
11441  return (walk_tree (&st, contains_label_1, NULL, NULL) != NULL_TREE);
11442}
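/* Illustrative sketch (an editorial assumption, not from the original
   source): contains_label_p guards dead-branch elimination in
   fold_ternary against constructs such as

     flag ? 0 : ({ lab: ; 1; })      with a "goto lab;" elsewhere,

   where deleting the unselected arm would also delete a label that is
   still the target of a jump.  */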
11443
11444/* Fold a ternary expression of code CODE and type TYPE with operands
11445   OP0, OP1, and OP2.  Return the folded expression if folding is
11446   successful.  Otherwise, return NULL_TREE.  */
11447
11448tree
11449fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11450{
11451  tree tem;
11452  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11453  enum tree_code_class kind = TREE_CODE_CLASS (code);
11454
11455  gcc_assert (IS_EXPR_CODE_CLASS (kind)
11456	      && TREE_CODE_LENGTH (code) == 3);
11457
11458  /* Strip any conversions that don't change the mode.  This is safe
11459     for every expression, except for a comparison expression because
11460     its signedness is derived from its operands.  So, in the latter
11461     case, only strip conversions that don't change the signedness.
11462
11463     Note that this is done as an internal manipulation within the
11464     constant folder, in order to find the simplest representation of
11465     the arguments so that their form can be studied.  In any cases,
11466     the appropriate type conversions should be put back in the tree
11467     that will get out of the constant folder.  */
11468  if (op0)
11469    {
11470      arg0 = op0;
11471      STRIP_NOPS (arg0);
11472    }
11473
11474  if (op1)
11475    {
11476      arg1 = op1;
11477      STRIP_NOPS (arg1);
11478    }
11479
11480  switch (code)
11481    {
11482    case COMPONENT_REF:
11483      if (TREE_CODE (arg0) == CONSTRUCTOR
11484	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11485	{
11486	  unsigned HOST_WIDE_INT idx;
11487	  tree field, value;
11488	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11489	    if (field == arg1)
11490	      return value;
11491	}
11492      return NULL_TREE;
11493
11494    case COND_EXPR:
11495      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11496	 so all simple results must be passed through pedantic_non_lvalue.  */
11497      if (TREE_CODE (arg0) == INTEGER_CST)
11498	{
11499	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11500	  tem = integer_zerop (arg0) ? op2 : op1;
11501	  /* Only optimize constant conditions when the selected branch
11502	     has the same type as the COND_EXPR.  This avoids optimizing
11503             away "c ? x : throw", where the throw has a void type.
11504             Avoid throwing away the unused operand if it contains a label.  */
11505          if ((!TREE_SIDE_EFFECTS (unused_op)
11506               || !contains_label_p (unused_op))
11507              && (! VOID_TYPE_P (TREE_TYPE (tem))
11508                  || VOID_TYPE_P (type)))
11509	    return pedantic_non_lvalue (tem);
11510	  return NULL_TREE;
11511	}
11512      if (operand_equal_p (arg1, op2, 0))
11513	return pedantic_omit_one_operand (type, arg1, arg0);
11514
11515      /* If we have A op B ? A : C, we may be able to convert this to a
11516	 simpler expression, depending on the operation and the values
11517	 of B and C.  Signed zeros prevent all of these transformations,
11518	 for reasons given above each one.
11519
11520         Also try swapping the arguments and inverting the conditional.  */
11521      if (COMPARISON_CLASS_P (arg0)
11522	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11523					     arg1, TREE_OPERAND (arg0, 1))
11524	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11525	{
11526	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11527	  if (tem)
11528	    return tem;
11529	}
11530
11531      if (COMPARISON_CLASS_P (arg0)
11532	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11533					     op2,
11534					     TREE_OPERAND (arg0, 1))
11535	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11536	{
11537	  tem = fold_truth_not_expr (arg0);
11538	  if (tem && COMPARISON_CLASS_P (tem))
11539	    {
11540	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11541	      if (tem)
11542		return tem;
11543	    }
11544	}
11545
11546      /* If the second operand is simpler than the third, swap them
11547	 since that produces better jump optimization results.  */
11548      if (truth_value_p (TREE_CODE (arg0))
11549	  && tree_swap_operands_p (op1, op2, false))
11550	{
11551	  /* See if this can be inverted.  If it can't, possibly because
11552	     it was a floating-point inequality comparison, don't do
11553	     anything.  */
11554	  tem = fold_truth_not_expr (arg0);
11555	  if (tem)
11556	    return fold_build3 (code, type, tem, op2, op1);
11557	}
11558
11559      /* Convert A ? 1 : 0 to simply A.  */
11560      if (integer_onep (op1)
11561	  && integer_zerop (op2)
11562	  /* If we try to convert OP0 to our type, the
11563	     call to fold will try to move the conversion inside
11564	     a COND, which will recurse.  In that case, the COND_EXPR
11565	     is probably the best choice, so leave it alone.  */
11566	  && type == TREE_TYPE (arg0))
11567	return pedantic_non_lvalue (arg0);
11568
11569      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11570	 over COND_EXPR in cases such as floating point comparisons.  */
11571      if (integer_zerop (op1)
11572	  && integer_onep (op2)
11573	  && truth_value_p (TREE_CODE (arg0)))
11574	return pedantic_non_lvalue (fold_convert (type,
11575						  invert_truthvalue (arg0)));
11576
11577      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
11578      if (TREE_CODE (arg0) == LT_EXPR
11579	  && integer_zerop (TREE_OPERAND (arg0, 1))
11580	  && integer_zerop (op2)
11581	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11582	{
11583	  /* sign_bit_p only checks ARG1 bits within A's precision.
11584	     If <sign bit of A> has wider type than A, bits outside
11585	     of A's precision in <sign bit of A> need to be checked.
11586	     If they are all 0, this optimization needs to be done
11587	     in unsigned A's type, if they are all 1 in signed A's type,
11588	     otherwise this can't be done.  */
11589	  if (TYPE_PRECISION (TREE_TYPE (tem))
11590	      < TYPE_PRECISION (TREE_TYPE (arg1))
11591	      && TYPE_PRECISION (TREE_TYPE (tem))
11592		 < TYPE_PRECISION (type))
11593	    {
11594	      unsigned HOST_WIDE_INT mask_lo;
11595	      HOST_WIDE_INT mask_hi;
11596	      int inner_width, outer_width;
11597	      tree tem_type;
11598
11599	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11600	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11601	      if (outer_width > TYPE_PRECISION (type))
11602		outer_width = TYPE_PRECISION (type);
11603
11604	      if (outer_width > HOST_BITS_PER_WIDE_INT)
11605		{
11606		  mask_hi = ((unsigned HOST_WIDE_INT) -1
11607			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11608		  mask_lo = -1;
11609		}
11610	      else
11611		{
11612		  mask_hi = 0;
11613		  mask_lo = ((unsigned HOST_WIDE_INT) -1
11614			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
11615		}
11616	      if (inner_width > HOST_BITS_PER_WIDE_INT)
11617		{
11618		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11619			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
11620		  mask_lo = 0;
11621		}
11622	      else
11623		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11624			     >> (HOST_BITS_PER_WIDE_INT - inner_width));
11625
11626	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11627		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11628		{
11629		  tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11630		  tem = fold_convert (tem_type, tem);
11631		}
11632	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11633		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11634		{
11635		  tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11636		  tem = fold_convert (tem_type, tem);
11637		}
11638	      else
11639		tem = NULL;
11640	    }
11641
11642	  if (tem)
11643	    return fold_convert (type,
11644				 fold_build2 (BIT_AND_EXPR,
11645					      TREE_TYPE (tem), tem,
11646					      fold_convert (TREE_TYPE (tem),
11647							    arg1)));
11648	}
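      /* Illustrative note: for 32-bit int a, "a < 0 ? 0x80000000 : 0"
	 is simply "a & 0x80000000", because the sign bit is set
	 exactly when a < 0; the precision checks above only extend
	 this to the case where the constant is wider than a.  */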
11649
11650      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11651	 already handled above.  */
11652      if (TREE_CODE (arg0) == BIT_AND_EXPR
11653	  && integer_onep (TREE_OPERAND (arg0, 1))
11654	  && integer_zerop (op2)
11655	  && integer_pow2p (arg1))
11656	{
11657	  tree tem = TREE_OPERAND (arg0, 0);
11658	  STRIP_NOPS (tem);
11659	  if (TREE_CODE (tem) == RSHIFT_EXPR
11660              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11661              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11662	         TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11663	    return fold_build2 (BIT_AND_EXPR, type,
11664				TREE_OPERAND (tem, 0), arg1);
11665	}
11666
11667      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11668	 is probably obsolete because the first operand should be a
11669	 truth value (that's why we have the two cases above), but let's
11670	 leave it in until we can confirm this for all front-ends.  */
11671      if (integer_zerop (op2)
11672	  && TREE_CODE (arg0) == NE_EXPR
11673	  && integer_zerop (TREE_OPERAND (arg0, 1))
11674	  && integer_pow2p (arg1)
11675	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11676	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11677			      arg1, OEP_ONLY_CONST))
11678	return pedantic_non_lvalue (fold_convert (type,
11679						  TREE_OPERAND (arg0, 0)));
11680
11681      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
11682      if (integer_zerop (op2)
11683	  && truth_value_p (TREE_CODE (arg0))
11684	  && truth_value_p (TREE_CODE (arg1)))
11685	return fold_build2 (TRUTH_ANDIF_EXPR, type,
11686			    fold_convert (type, arg0),
11687			    arg1);
11688
11689      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
11690      if (integer_onep (op2)
11691	  && truth_value_p (TREE_CODE (arg0))
11692	  && truth_value_p (TREE_CODE (arg1)))
11693	{
11694	  /* Only perform transformation if ARG0 is easily inverted.  */
11695	  tem = fold_truth_not_expr (arg0);
11696	  if (tem)
11697	    return fold_build2 (TRUTH_ORIF_EXPR, type,
11698				fold_convert (type, tem),
11699				arg1);
11700	}
11701
11702      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11703      if (integer_zerop (arg1)
11704	  && truth_value_p (TREE_CODE (arg0))
11705	  && truth_value_p (TREE_CODE (op2)))
11706	{
11707	  /* Only perform transformation if ARG0 is easily inverted.  */
11708	  tem = fold_truth_not_expr (arg0);
11709	  if (tem)
11710	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
11711				fold_convert (type, tem),
11712				op2);
11713	}
11714
11715      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
11716      if (integer_onep (arg1)
11717	  && truth_value_p (TREE_CODE (arg0))
11718	  && truth_value_p (TREE_CODE (op2)))
11719	return fold_build2 (TRUTH_ORIF_EXPR, type,
11720			    fold_convert (type, arg0),
11721			    op2);
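      /* Illustrative note: together the four rules above rewrite a
	 COND_EXPR over truth values into short-circuit form, e.g.
	 "a ? b : 0" -> "a && b" and "a ? 1 : b" -> "a || b", exposing
	 the usual TRUTH_ANDIF/TRUTH_ORIF folding opportunities.  */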
11722
11723      return NULL_TREE;
11724
11725    case CALL_EXPR:
11726      /* Check for a built-in function.  */
11727      if (TREE_CODE (op0) == ADDR_EXPR
11728	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11729	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11730	return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11731      return NULL_TREE;
11732
11733    case BIT_FIELD_REF:
11734      if (TREE_CODE (arg0) == VECTOR_CST
11735	  && type == TREE_TYPE (TREE_TYPE (arg0))
11736	  && host_integerp (arg1, 1)
11737	  && host_integerp (op2, 1))
11738	{
11739	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11740	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11741
11742	  if (width != 0
11743	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11744	      && (idx % width) == 0
11745	      && (idx = idx / width)
11746		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11747	    {
11748	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
11749	      while (idx-- > 0 && elements)
11750		elements = TREE_CHAIN (elements);
11751	      if (elements)
11752		return TREE_VALUE (elements);
11753	      else
11754		return fold_convert (type, integer_zero_node);
11755	    }
11756	}
11757      return NULL_TREE;
11758
11759    default:
11760      return NULL_TREE;
11761    } /* switch (code) */
11762}
11763
11764/* Perform constant folding and related simplification of EXPR.
11765   The related simplifications include x*1 => x, x*0 => 0, etc.,
11766   and application of the associative law.
11767   NOP_EXPR conversions may be removed freely (as long as we
11768   are careful not to change the type of the overall expression).
11769   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11770   but we can constant-fold them if they have constant operands.  */
11771
11772#ifdef ENABLE_FOLD_CHECKING
11773# define fold(x) fold_1 (x)
11774static tree fold_1 (tree);
11775static
11776#endif
11777tree
11778fold (tree expr)
11779{
11780  const tree t = expr;
11781  enum tree_code code = TREE_CODE (t);
11782  enum tree_code_class kind = TREE_CODE_CLASS (code);
11783  tree tem;
11784
11785  /* Return right away if a constant.  */
11786  if (kind == tcc_constant)
11787    return t;
11788
11789  if (IS_EXPR_CODE_CLASS (kind))
11790    {
11791      tree type = TREE_TYPE (t);
11792      tree op0, op1, op2;
11793
11794      switch (TREE_CODE_LENGTH (code))
11795	{
11796	case 1:
11797	  op0 = TREE_OPERAND (t, 0);
11798	  tem = fold_unary (code, type, op0);
11799	  return tem ? tem : expr;
11800	case 2:
11801	  op0 = TREE_OPERAND (t, 0);
11802	  op1 = TREE_OPERAND (t, 1);
11803	  tem = fold_binary (code, type, op0, op1);
11804	  return tem ? tem : expr;
11805	case 3:
11806	  op0 = TREE_OPERAND (t, 0);
11807	  op1 = TREE_OPERAND (t, 1);
11808	  op2 = TREE_OPERAND (t, 2);
11809	  tem = fold_ternary (code, type, op0, op1, op2);
11810	  return tem ? tem : expr;
11811	default:
11812	  break;
11813	}
11814    }
11815
11816  switch (code)
11817    {
11818    case CONST_DECL:
11819      return fold (DECL_INITIAL (t));
11820
11821    default:
11822      return t;
11823    } /* switch (code) */
11824}
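/* Illustrative sketch (an editorial assumption, not from the original
   source): a typical caller builds a tree and lets fold collapse it,
   e.g.

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
			      integer_one_node, integer_one_node));

   which yields an INTEGER_CST with value 2.  fold itself merely
   dispatches to fold_unary, fold_binary or fold_ternary by arity.  */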
11825
11826#ifdef ENABLE_FOLD_CHECKING
11827#undef fold
11828
11829static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11830static void fold_check_failed (tree, tree);
11831void print_fold_checksum (tree);
11832
11833/* When --enable-checking=fold, compute a digest of EXPR before and
11834   after the actual fold call, to verify that fold did not accidentally
11835   change the original EXPR.  */
11836
11837tree
11838fold (tree expr)
11839{
11840  tree ret;
11841  struct md5_ctx ctx;
11842  unsigned char checksum_before[16], checksum_after[16];
11843  htab_t ht;
11844
11845  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11846  md5_init_ctx (&ctx);
11847  fold_checksum_tree (expr, &ctx, ht);
11848  md5_finish_ctx (&ctx, checksum_before);
11849  htab_empty (ht);
11850
11851  ret = fold_1 (expr);
11852
11853  md5_init_ctx (&ctx);
11854  fold_checksum_tree (expr, &ctx, ht);
11855  md5_finish_ctx (&ctx, checksum_after);
11856  htab_delete (ht);
11857
11858  if (memcmp (checksum_before, checksum_after, 16))
11859    fold_check_failed (expr, ret);
11860
11861  return ret;
11862}
11863
11864void
11865print_fold_checksum (tree expr)
11866{
11867  struct md5_ctx ctx;
11868  unsigned char checksum[16], cnt;
11869  htab_t ht;
11870
11871  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11872  md5_init_ctx (&ctx);
11873  fold_checksum_tree (expr, &ctx, ht);
11874  md5_finish_ctx (&ctx, checksum);
11875  htab_delete (ht);
11876  for (cnt = 0; cnt < 16; ++cnt)
11877    fprintf (stderr, "%02x", checksum[cnt]);
11878  putc ('\n', stderr);
11879}
11880
11881static void
11882fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11883{
11884  internal_error ("fold check: original tree changed by fold");
11885}
11886
11887static void
11888fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11889{
11890  void **slot;
11891  enum tree_code code;
11892  struct tree_function_decl buf;
11893  int i, len;
11894
11895recursive_label:
11896
11897  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11898	       <= sizeof (struct tree_function_decl))
11899	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11900  if (expr == NULL)
11901    return;
11902  slot = htab_find_slot (ht, expr, INSERT);
11903  if (*slot != NULL)
11904    return;
11905  *slot = expr;
11906  code = TREE_CODE (expr);
11907  if (TREE_CODE_CLASS (code) == tcc_declaration
11908      && DECL_ASSEMBLER_NAME_SET_P (expr))
11909    {
11910      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
11911      memcpy ((char *) &buf, expr, tree_size (expr));
11912      expr = (tree) &buf;
11913      SET_DECL_ASSEMBLER_NAME (expr, NULL);
11914    }
11915  else if (TREE_CODE_CLASS (code) == tcc_type
11916	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11917	       || TYPE_CACHED_VALUES_P (expr)
11918	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11919    {
11920      /* Allow these fields to be modified.  */
11921      memcpy ((char *) &buf, expr, tree_size (expr));
11922      expr = (tree) &buf;
11923      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11924      TYPE_POINTER_TO (expr) = NULL;
11925      TYPE_REFERENCE_TO (expr) = NULL;
11926      if (TYPE_CACHED_VALUES_P (expr))
11927	{
11928	  TYPE_CACHED_VALUES_P (expr) = 0;
11929	  TYPE_CACHED_VALUES (expr) = NULL;
11930	}
11931    }
11932  md5_process_bytes (expr, tree_size (expr), ctx);
11933  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11934  if (TREE_CODE_CLASS (code) != tcc_type
11935      && TREE_CODE_CLASS (code) != tcc_declaration
11936      && code != TREE_LIST)
11937    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11938  switch (TREE_CODE_CLASS (code))
11939    {
11940    case tcc_constant:
11941      switch (code)
11942	{
11943	case STRING_CST:
11944	  md5_process_bytes (TREE_STRING_POINTER (expr),
11945			     TREE_STRING_LENGTH (expr), ctx);
11946	  break;
11947	case COMPLEX_CST:
11948	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11949	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11950	  break;
11951	case VECTOR_CST:
11952	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11953	  break;
11954	default:
11955	  break;
11956	}
11957      break;
11958    case tcc_exceptional:
11959      switch (code)
11960	{
11961	case TREE_LIST:
11962	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11963	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11964	  expr = TREE_CHAIN (expr);
11965	  goto recursive_label;
11966	  break;
11967	case TREE_VEC:
11968	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11969	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11970	  break;
11971	default:
11972	  break;
11973	}
11974      break;
11975    case tcc_expression:
11976    case tcc_reference:
11977    case tcc_comparison:
11978    case tcc_unary:
11979    case tcc_binary:
11980    case tcc_statement:
11981      len = TREE_CODE_LENGTH (code);
11982      for (i = 0; i < len; ++i)
11983	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11984      break;
11985    case tcc_declaration:
11986      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11987      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11988      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11989	{
11990	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11991	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11992	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11993	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11994	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11995	}
11996      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11997	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11998
11999      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12000	{
12001	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12002	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12003	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12004	}
12005      break;
12006    case tcc_type:
12007      if (TREE_CODE (expr) == ENUMERAL_TYPE)
12008        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12009      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12010      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12011      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12012      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12013      if (INTEGRAL_TYPE_P (expr)
12014          || SCALAR_FLOAT_TYPE_P (expr))
12015	{
12016	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12017	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12018	}
12019      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12020      if (TREE_CODE (expr) == RECORD_TYPE
12021	  || TREE_CODE (expr) == UNION_TYPE
12022	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12023	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12024      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12025      break;
12026    default:
12027      break;
12028    }
12029}
12030
12031#endif
12032
12033/* Fold a unary tree expression with code CODE of type TYPE with an
12034   operand OP0.  Return a folded expression if successful.  Otherwise,
12035   return a tree expression with code CODE of type TYPE with an
12036   operand OP0.  */
12037
12038tree
12039fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12040{
12041  tree tem;
12042#ifdef ENABLE_FOLD_CHECKING
12043  unsigned char checksum_before[16], checksum_after[16];
12044  struct md5_ctx ctx;
12045  htab_t ht;
12046
12047  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12048  md5_init_ctx (&ctx);
12049  fold_checksum_tree (op0, &ctx, ht);
12050  md5_finish_ctx (&ctx, checksum_before);
12051  htab_empty (ht);
12052#endif
12053
12054  tem = fold_unary (code, type, op0);
12055  if (!tem)
12056    tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12057
12058#ifdef ENABLE_FOLD_CHECKING
12059  md5_init_ctx (&ctx);
12060  fold_checksum_tree (op0, &ctx, ht);
12061  md5_finish_ctx (&ctx, checksum_after);
12062  htab_delete (ht);
12063
12064  if (memcmp (checksum_before, checksum_after, 16))
12065    fold_check_failed (op0, tem);
12066#endif
12067  return tem;
12068}
12069
12070/* Fold a binary tree expression with code CODE of type TYPE with
12071   operands OP0 and OP1.  Return a folded expression if successful.
12072   Otherwise, return a tree expression with code CODE of type TYPE
12073   with operands OP0 and OP1.  */
12074
12075tree
12076fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12077		  MEM_STAT_DECL)
12078{
12079  tree tem;
12080#ifdef ENABLE_FOLD_CHECKING
12081  unsigned char checksum_before_op0[16],
12082                checksum_before_op1[16],
12083		checksum_after_op0[16],
12084		checksum_after_op1[16];
12085  struct md5_ctx ctx;
12086  htab_t ht;
12087
12088  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12089  md5_init_ctx (&ctx);
12090  fold_checksum_tree (op0, &ctx, ht);
12091  md5_finish_ctx (&ctx, checksum_before_op0);
12092  htab_empty (ht);
12093
12094  md5_init_ctx (&ctx);
12095  fold_checksum_tree (op1, &ctx, ht);
12096  md5_finish_ctx (&ctx, checksum_before_op1);
12097  htab_empty (ht);
12098#endif
12099
12100  tem = fold_binary (code, type, op0, op1);
12101  if (!tem)
12102    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12103
12104#ifdef ENABLE_FOLD_CHECKING
12105  md5_init_ctx (&ctx);
12106  fold_checksum_tree (op0, &ctx, ht);
12107  md5_finish_ctx (&ctx, checksum_after_op0);
12108  htab_empty (ht);
12109
12110  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12111    fold_check_failed (op0, tem);
12112
12113  md5_init_ctx (&ctx);
12114  fold_checksum_tree (op1, &ctx, ht);
12115  md5_finish_ctx (&ctx, checksum_after_op1);
12116  htab_delete (ht);
12117
12118  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12119    fold_check_failed (op1, tem);
12120#endif
12121  return tem;
12122}
12123
12124/* Fold a ternary tree expression with code CODE of type TYPE with
12125   operands OP0, OP1, and OP2.  Return a folded expression if
12126   successful.  Otherwise, return a tree expression with code CODE of
12127   type TYPE with operands OP0, OP1, and OP2.  */
12128
12129tree
12130fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12131	     MEM_STAT_DECL)
12132{
12133  tree tem;
12134#ifdef ENABLE_FOLD_CHECKING
12135  unsigned char checksum_before_op0[16],
12136                checksum_before_op1[16],
12137                checksum_before_op2[16],
12138		checksum_after_op0[16],
12139		checksum_after_op1[16],
12140		checksum_after_op2[16];
12141  struct md5_ctx ctx;
12142  htab_t ht;
12143
12144  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12145  md5_init_ctx (&ctx);
12146  fold_checksum_tree (op0, &ctx, ht);
12147  md5_finish_ctx (&ctx, checksum_before_op0);
12148  htab_empty (ht);
12149
12150  md5_init_ctx (&ctx);
12151  fold_checksum_tree (op1, &ctx, ht);
12152  md5_finish_ctx (&ctx, checksum_before_op1);
12153  htab_empty (ht);
12154
12155  md5_init_ctx (&ctx);
12156  fold_checksum_tree (op2, &ctx, ht);
12157  md5_finish_ctx (&ctx, checksum_before_op2);
12158  htab_empty (ht);
12159#endif
12160
12161  tem = fold_ternary (code, type, op0, op1, op2);
12162  if (!tem)
12163    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12164
12165#ifdef ENABLE_FOLD_CHECKING
12166  md5_init_ctx (&ctx);
12167  fold_checksum_tree (op0, &ctx, ht);
12168  md5_finish_ctx (&ctx, checksum_after_op0);
12169  htab_empty (ht);
12170
12171  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12172    fold_check_failed (op0, tem);
12173
12174  md5_init_ctx (&ctx);
12175  fold_checksum_tree (op1, &ctx, ht);
12176  md5_finish_ctx (&ctx, checksum_after_op1);
12177  htab_empty (ht);
12178
12179  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12180    fold_check_failed (op1, tem);
12181
12182  md5_init_ctx (&ctx);
12183  fold_checksum_tree (op2, &ctx, ht);
12184  md5_finish_ctx (&ctx, checksum_after_op2);
12185  htab_delete (ht);
12186
12187  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12188    fold_check_failed (op2, tem);
12189#endif
12190  return tem;
12191}
12192
12193/* Perform constant folding and related simplification of an initializer
12194   expression EXPR.  These functions behave identically to "fold_buildN"
12195   but ignore potential run-time traps and exceptions that fold must preserve.  */
12196
12197#define START_FOLD_INIT \
12198  int saved_signaling_nans = flag_signaling_nans;\
12199  int saved_trapping_math = flag_trapping_math;\
12200  int saved_rounding_math = flag_rounding_math;\
12201  int saved_trapv = flag_trapv;\
12202  int saved_folding_initializer = folding_initializer;\
12203  flag_signaling_nans = 0;\
12204  flag_trapping_math = 0;\
12205  flag_rounding_math = 0;\
12206  flag_trapv = 0;\
12207  folding_initializer = 1;
12208
12209#define END_FOLD_INIT \
12210  flag_signaling_nans = saved_signaling_nans;\
12211  flag_trapping_math = saved_trapping_math;\
12212  flag_rounding_math = saved_rounding_math;\
12213  flag_trapv = saved_trapv;\
12214  folding_initializer = saved_folding_initializer;
12215
12216tree
12217fold_build1_initializer (enum tree_code code, tree type, tree op)
12218{
12219  tree result;
12220  START_FOLD_INIT;
12221
12222  result = fold_build1 (code, type, op);
12223
12224  END_FOLD_INIT;
12225  return result;
12226}
12227
12228tree
12229fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12230{
12231  tree result;
12232  START_FOLD_INIT;
12233
12234  result = fold_build2 (code, type, op0, op1);
12235
12236  END_FOLD_INIT;
12237  return result;
12238}
12239
12240tree
12241fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12242			 tree op2)
12243{
12244  tree result;
12245  START_FOLD_INIT;
12246
12247  result = fold_build3 (code, type, op0, op1, op2);
12248
12249  END_FOLD_INIT;
12250  return result;
12251}
12252
12253#undef START_FOLD_INIT
12254#undef END_FOLD_INIT
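/* Illustrative sketch (an editorial assumption, not from the original
   source; "one" and "third" are hypothetical REAL_CST operands): with
   -frounding-math in effect, fold_build2 must not fold an inexact
   division such as 1.0 / 3.0, whereas

     tree t = fold_build2_initializer (RDIV_EXPR, double_type_node,
				       one, third);

   clears flag_rounding_math (and the trapping flags) around the fold,
   since an initializer is evaluated at translation time where no
   run-time trap or rounding-mode change can occur.  */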
12255
12256/* Determine if first argument is a multiple of second argument.  Return 0 if
12257   it is not, or we cannot easily determine it to be.
12258
12259   An example of the sort of thing we care about (at this point; this routine
12260   could surely be made more general, and expanded to do what the *_DIV_EXPR's
12261   fold cases do now) is discovering that
12262
12263     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12264
12265   is a multiple of
12266
12267     SAVE_EXPR (J * 8)
12268
12269   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12270
12271   This code also handles discovering that
12272
12273     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12274
12275   is a multiple of 8 so we don't have to worry about dealing with a
12276   possible remainder.
12277
12278   Note that we *look* inside a SAVE_EXPR only to determine how it was
12279   calculated; it is not safe for fold to do much of anything else with the
12280   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12281   at run time.  For example, the latter example above *cannot* be implemented
12282   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12283   evaluation time of the original SAVE_EXPR is not necessarily the same as
12284   at the time the new expression is evaluated.  The only optimization of this
12285   sort that would be valid is changing
12286
12287     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12288
12289   divided by 8 to
12290
12291     SAVE_EXPR (I) * SAVE_EXPR (J)
12292
12293   (where the same SAVE_EXPR (J) is used in the original and the
12294   transformed version).  */
12295
12296static int
12297multiple_of_p (tree type, tree top, tree bottom)
12298{
12299  if (operand_equal_p (top, bottom, 0))
12300    return 1;
12301
12302  if (TREE_CODE (type) != INTEGER_TYPE)
12303    return 0;
12304
12305  switch (TREE_CODE (top))
12306    {
12307    case BIT_AND_EXPR:
12308      /* Bitwise and provides a power of two multiple.  If the mask is
12309	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12310      if (!integer_pow2p (bottom))
12311	return 0;
12312      /* FALLTHRU */
12313
12314    case MULT_EXPR:
12315      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12316	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12317
12318    case PLUS_EXPR:
12319    case MINUS_EXPR:
12320      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12321	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12322
12323    case LSHIFT_EXPR:
12324      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12325	{
12326	  tree op1, t1;
12327
12328	  op1 = TREE_OPERAND (top, 1);
12329	  /* const_binop may not detect overflow correctly,
12330	     so check for it explicitly here.  */
12331	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12332	      > TREE_INT_CST_LOW (op1)
12333	      && TREE_INT_CST_HIGH (op1) == 0
12334	      && 0 != (t1 = fold_convert (type,
12335					  const_binop (LSHIFT_EXPR,
12336						       size_one_node,
12337						       op1, 0)))
12338	      && ! TREE_OVERFLOW (t1))
12339	    return multiple_of_p (type, t1, bottom);
12340	}
12341      return 0;
12342
12343    case NOP_EXPR:
12344      /* Can't handle conversions from non-integral or wider integral type.  */
12345      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12346	  || (TYPE_PRECISION (type)
12347	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12348	return 0;
12349
12350      /* ... fall through ...  */
12351
12352    case SAVE_EXPR:
12353      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12354
12355    case INTEGER_CST:
12356      if (TREE_CODE (bottom) != INTEGER_CST
12357	  || (TYPE_UNSIGNED (type)
12358	      && (tree_int_cst_sgn (top) < 0
12359		  || tree_int_cst_sgn (bottom) < 0)))
12360	return 0;
12361      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12362					 top, bottom, 0));
12363
12364    default:
12365      return 0;
12366    }
12367}
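/* Illustrative note: multiple_of_p (type, top, bottom) determines,
   for instance, that "i * (j * 8)" is a multiple of 8 through the
   MULT_EXPR case (8 divides the second factor), and that "x << 3"
   is a multiple of 8 through the LSHIFT_EXPR case, since 1 << 3 == 8.  */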
12368
12369/* Return true if `t' is known to be non-negative.  If the return
12370   value is based on the assumption that signed overflow is undefined,
12371   set *STRICT_OVERFLOW_P to true; otherwise, don't change
12372   *STRICT_OVERFLOW_P.  */
12373
12374int
12375tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
12376{
12377  if (t == error_mark_node)
12378    return 0;
12379
12380  if (TYPE_UNSIGNED (TREE_TYPE (t)))
12381    return 1;
12382
12383  switch (TREE_CODE (t))
12384    {
12385    case SSA_NAME:
12386      /* Query VRP to see if it has recorded any information about
12387	 the range of this object.  */
12388      return ssa_name_nonnegative_p (t);
12389
12390    case ABS_EXPR:
12391      /* We can't return 1 if flag_wrapv is set because
12392	 ABS_EXPR<INT_MIN> = INT_MIN.  */
12393      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12394	return 1;
12395      if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12396	{
12397	  *strict_overflow_p = true;
12398	  return 1;
12399	}
12400      break;
12401
12402    case INTEGER_CST:
12403      return tree_int_cst_sgn (t) >= 0;
12404
12405    case REAL_CST:
12406      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12407
12408    case PLUS_EXPR:
12409      if (FLOAT_TYPE_P (TREE_TYPE (t)))
12410	return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12411					       strict_overflow_p)
12412		&& tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12413						  strict_overflow_p));
12414
12415      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12416	 both unsigned and at least 2 bits shorter than the result.  */
12417      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12418	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12419	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12420	{
12421	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12422	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12423	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12424	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12425	    {
12426	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12427				       TYPE_PRECISION (inner2)) + 1;
12428	      return prec < TYPE_PRECISION (TREE_TYPE (t));
12429	    }
12430	}
12431      break;
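      /* Illustrative note for the zero_extend rule above: with 8-bit
	 operands widened to a 32-bit int, the sum is at most
	 255 + 255 == 510 < 2^9, i.e. it fits in MAX (8, 8) + 1 == 9
	 bits and can never reach the sign bit.  */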
12432
12433    case MULT_EXPR:
12434      if (FLOAT_TYPE_P (TREE_TYPE (t)))
12435	{
12436	  /* x * x for floating point x is always non-negative.  */
12437	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12438	    return 1;
12439	  return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12440						 strict_overflow_p)
12441		  && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12442						    strict_overflow_p));
12443	}
12444
12445      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12446	 both unsigned and their total bits is shorter than the result.  */
12447      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12448	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12449	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12450	{
12451	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12452	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12453	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12454	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12455	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12456		   < TYPE_PRECISION (TREE_TYPE (t));
12457	}
12458      return 0;
12459
12460    case BIT_AND_EXPR:
12461    case MAX_EXPR:
12462      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12463					     strict_overflow_p)
12464	      || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12465						strict_overflow_p));
12466
12467    case BIT_IOR_EXPR:
12468    case BIT_XOR_EXPR:
12469    case MIN_EXPR:
12470    case RDIV_EXPR:
12471    case TRUNC_DIV_EXPR:
12472    case CEIL_DIV_EXPR:
12473    case FLOOR_DIV_EXPR:
12474    case ROUND_DIV_EXPR:
12475      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12476					     strict_overflow_p)
12477	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12478						strict_overflow_p));
12479
12480    case TRUNC_MOD_EXPR:
12481    case CEIL_MOD_EXPR:
12482    case FLOOR_MOD_EXPR:
12483    case ROUND_MOD_EXPR:
12484    case SAVE_EXPR:
12485    case NON_LVALUE_EXPR:
12486    case FLOAT_EXPR:
12487    case FIX_TRUNC_EXPR:
12488      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12489					    strict_overflow_p);
12490
12491    case COMPOUND_EXPR:
12492    case MODIFY_EXPR:
12493      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12494					    strict_overflow_p);
12495
12496    case BIND_EXPR:
12497      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
12498					    strict_overflow_p);
12499
12500    case COND_EXPR:
12501      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12502					     strict_overflow_p)
12503	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
12504						strict_overflow_p));
12505
12506    case NOP_EXPR:
12507      {
12508	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12509	tree outer_type = TREE_TYPE (t);
12510
12511	if (TREE_CODE (outer_type) == REAL_TYPE)
12512	  {
12513	    if (TREE_CODE (inner_type) == REAL_TYPE)
12514	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12515						    strict_overflow_p);
12516	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
12517	      {
12518		if (TYPE_UNSIGNED (inner_type))
12519		  return 1;
12520		return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12521						      strict_overflow_p);
12522	      }
12523	  }
12524	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12525	  {
12526	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12528						    strict_overflow_p);
12529	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
12530	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12531		      && TYPE_UNSIGNED (inner_type);
12532	  }
12533      }
12534      break;
12535
12536    case TARGET_EXPR:
12537      {
12538	tree temp = TARGET_EXPR_SLOT (t);
12539	t = TARGET_EXPR_INITIAL (t);
12540
12541	/* If the initializer is non-void, then it's a normal expression
12542	   that will be assigned to the slot.  */
12543	if (!VOID_TYPE_P (t))
12544	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
12545
12546	/* Otherwise, the initializer sets the slot in some way.  One common
12547	   way is an assignment statement at the end of the initializer.  */
12548	while (1)
12549	  {
12550	    if (TREE_CODE (t) == BIND_EXPR)
12551	      t = expr_last (BIND_EXPR_BODY (t));
12552	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12553		     || TREE_CODE (t) == TRY_CATCH_EXPR)
12554	      t = expr_last (TREE_OPERAND (t, 0));
12555	    else if (TREE_CODE (t) == STATEMENT_LIST)
12556	      t = expr_last (t);
12557	    else
12558	      break;
12559	  }
12560	if (TREE_CODE (t) == MODIFY_EXPR
12561	    && TREE_OPERAND (t, 0) == temp)
12562	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12563						strict_overflow_p);
12564
12565	return 0;
12566      }
12567
12568    case CALL_EXPR:
12569      {
12570	tree fndecl = get_callee_fndecl (t);
12571	tree arglist = TREE_OPERAND (t, 1);
12572	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12573	  switch (DECL_FUNCTION_CODE (fndecl))
12574	    {
12575	    CASE_FLT_FN (BUILT_IN_ACOS):
12576	    CASE_FLT_FN (BUILT_IN_ACOSH):
12577	    CASE_FLT_FN (BUILT_IN_CABS):
12578	    CASE_FLT_FN (BUILT_IN_COSH):
12579	    CASE_FLT_FN (BUILT_IN_ERFC):
12580	    CASE_FLT_FN (BUILT_IN_EXP):
12581	    CASE_FLT_FN (BUILT_IN_EXP10):
12582	    CASE_FLT_FN (BUILT_IN_EXP2):
12583	    CASE_FLT_FN (BUILT_IN_FABS):
12584	    CASE_FLT_FN (BUILT_IN_FDIM):
12585	    CASE_FLT_FN (BUILT_IN_HYPOT):
12586	    CASE_FLT_FN (BUILT_IN_POW10):
12587	    CASE_INT_FN (BUILT_IN_FFS):
12588	    CASE_INT_FN (BUILT_IN_PARITY):
12589	    CASE_INT_FN (BUILT_IN_POPCOUNT):
12590	      /* Always true.  */
12591	      return 1;
12592
12593	    CASE_FLT_FN (BUILT_IN_SQRT):
12594	      /* sqrt(-0.0) is -0.0.  */
12595	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12596		return 1;
12597	      return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12598						    strict_overflow_p);
12599
12600	    CASE_FLT_FN (BUILT_IN_ASINH):
12601	    CASE_FLT_FN (BUILT_IN_ATAN):
12602	    CASE_FLT_FN (BUILT_IN_ATANH):
12603	    CASE_FLT_FN (BUILT_IN_CBRT):
12604	    CASE_FLT_FN (BUILT_IN_CEIL):
12605	    CASE_FLT_FN (BUILT_IN_ERF):
12606	    CASE_FLT_FN (BUILT_IN_EXPM1):
12607	    CASE_FLT_FN (BUILT_IN_FLOOR):
12608	    CASE_FLT_FN (BUILT_IN_FMOD):
12609	    CASE_FLT_FN (BUILT_IN_FREXP):
12610	    CASE_FLT_FN (BUILT_IN_LCEIL):
12611	    CASE_FLT_FN (BUILT_IN_LDEXP):
12612	    CASE_FLT_FN (BUILT_IN_LFLOOR):
12613	    CASE_FLT_FN (BUILT_IN_LLCEIL):
12614	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
12615	    CASE_FLT_FN (BUILT_IN_LLRINT):
12616	    CASE_FLT_FN (BUILT_IN_LLROUND):
12617	    CASE_FLT_FN (BUILT_IN_LRINT):
12618	    CASE_FLT_FN (BUILT_IN_LROUND):
12619	    CASE_FLT_FN (BUILT_IN_MODF):
12620	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
12621	    CASE_FLT_FN (BUILT_IN_POW):
12622	    CASE_FLT_FN (BUILT_IN_RINT):
12623	    CASE_FLT_FN (BUILT_IN_ROUND):
12624	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
12625	    CASE_FLT_FN (BUILT_IN_SINH):
12626	    CASE_FLT_FN (BUILT_IN_TANH):
12627	    CASE_FLT_FN (BUILT_IN_TRUNC):
12628	      /* True if the 1st argument is nonnegative.  */
12629	      return tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12630						    strict_overflow_p);
12631
12632	    CASE_FLT_FN (BUILT_IN_FMAX):
12633	      /* True if the 1st OR 2nd arguments are nonnegative.  */
12634	      return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12635						     strict_overflow_p)
12636		      || (tree_expr_nonnegative_warnv_p
12637			  (TREE_VALUE (TREE_CHAIN (arglist)),
12638			   strict_overflow_p)));
12639
12640	    CASE_FLT_FN (BUILT_IN_FMIN):
12641	      /* True if the 1st AND 2nd arguments are nonnegative.  */
12642	      return (tree_expr_nonnegative_warnv_p (TREE_VALUE (arglist),
12643						     strict_overflow_p)
12644		      && (tree_expr_nonnegative_warnv_p
12645			  (TREE_VALUE (TREE_CHAIN (arglist)),
12646			   strict_overflow_p)));
12647
12648	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
12649	      /* True if the 2nd argument is nonnegative.  */
12650	      return (tree_expr_nonnegative_warnv_p
12651		      (TREE_VALUE (TREE_CHAIN (arglist)),
12652		       strict_overflow_p));
12653
12654	    default:
12655	      break;
12656	    }
12657      }
12658
12659      /* ... fall through ...  */
12660
12661    default:
12662      {
12663	tree type = TREE_TYPE (t);
12664	if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12665	    && truth_value_p (TREE_CODE (t)))
	  /* Truth values evaluate to 0 or 1, which is nonnegative unless we
	     have a signed:1 type (where the values are -1 and 0).  */
12668	  return true;
12669      }
12670    }
12671
  /* We don't know the sign of `t', so be conservative and return false.  */
12673  return 0;
12674}
12675
12676/* Return true if `t' is known to be non-negative.  Handle warnings
12677   about undefined signed overflow.  */
12678
12679int
12680tree_expr_nonnegative_p (tree t)
12681{
12682  int ret;
12683  bool strict_overflow_p;
12684
12685  strict_overflow_p = false;
12686  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
12687  if (strict_overflow_p)
12688    fold_overflow_warning (("assuming signed overflow does not occur when "
12689			    "determining that expression is always "
12690			    "non-negative"),
12691			   WARN_STRICT_OVERFLOW_MISC);
12692  return ret;
12693}
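/* A sketch of the intended use of the wrapper above; the caller shown
   is hypothetical and not part of this file.  A fold of fabs might do:

     if (tree_expr_nonnegative_p (arg))
       return arg;

   since fabs (arg) == arg whenever arg >= 0.  Any reliance on
   undefined signed overflow has already been reported through
   fold_overflow_warning by the time the wrapper returns.  */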
12694
/* Return true when T is an expression known to be nonzero.
   Floating point types are not handled here.  Similar logic is
   present in nonzero_address_p in rtlanal.c.
12698
12699   If the return value is based on the assumption that signed overflow
12700   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
12701   change *STRICT_OVERFLOW_P.  */
12702
12703bool
12704tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
12705{
12706  tree type = TREE_TYPE (t);
12707  bool sub_strict_overflow_p;
12708
12709  /* Doing something useful for floating point would need more work.  */
12710  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12711    return false;
12712
12713  switch (TREE_CODE (t))
12714    {
12715    case SSA_NAME:
12716      /* Query VRP to see if it has recorded any information about
12717	 the range of this object.  */
12718      return ssa_name_nonzero_p (t);
12719
12720    case ABS_EXPR:
12721      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12722					strict_overflow_p);
12723
12724    case INTEGER_CST:
12725      /* We used to test for !integer_zerop here.  This does not work correctly
12726	 if TREE_CONSTANT_OVERFLOW (t).  */
12727      return (TREE_INT_CST_LOW (t) != 0
12728	      || TREE_INT_CST_HIGH (t) != 0);
12729
12730    case PLUS_EXPR:
12731      if (TYPE_OVERFLOW_UNDEFINED (type))
12732	{
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
12735	  sub_strict_overflow_p = false;
12736	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12737					      &sub_strict_overflow_p)
12738	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12739						 &sub_strict_overflow_p))
12740	    return false;
	  /* One of the operands must be positive and the other non-negative.  */
12742	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
12743	     overflows, on a twos-complement machine the sum of two
12744	     nonnegative numbers can never be zero.  */
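	  /* Concretely: in 8 bits, two nonnegative values are each at
	     most 127, so their sum is at most 254 and can be zero
	     modulo 256 only if both operands are zero.  */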
12745	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12746					     strict_overflow_p)
12747	          || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12748						strict_overflow_p));
12749	}
12750      break;
12751
12752    case MULT_EXPR:
12753      if (TYPE_OVERFLOW_UNDEFINED (type))
12754	{
12755	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12756					 strict_overflow_p)
12757	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12758					    strict_overflow_p))
12759	    {
12760	      *strict_overflow_p = true;
12761	      return true;
12762	    }
12763	}
12764      break;
12765
12766    case NOP_EXPR:
12767      {
12768	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12769	tree outer_type = TREE_TYPE (t);
12770
12771	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12772		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12773					      strict_overflow_p));
12774      }
12775      break;
12776
12777   case ADDR_EXPR:
12778      {
12779	tree base = get_base_address (TREE_OPERAND (t, 0));
12780
12781	if (!base)
12782	  return false;
12783
12784	/* Weak declarations may link to NULL.  */
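	/* E.g. given "extern int foo __attribute__ ((weak));", &foo
	   may legitimately evaluate to NULL at run time when no
	   definition of foo is linked in.  */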
12785	if (VAR_OR_FUNCTION_DECL_P (base))
12786	  return !DECL_WEAK (base);
12787
12788	/* Constants are never weak.  */
12789	if (CONSTANT_CLASS_P (base))
12790	  return true;
12791
12792	return false;
12793      }
12794
12795    case COND_EXPR:
12796      sub_strict_overflow_p = false;
12797      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12798				     &sub_strict_overflow_p)
12799	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
12800					&sub_strict_overflow_p))
12801	{
12802	  if (sub_strict_overflow_p)
12803	    *strict_overflow_p = true;
12804	  return true;
12805	}
12806      break;
12807
12808    case MIN_EXPR:
12809      sub_strict_overflow_p = false;
12810      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12811				     &sub_strict_overflow_p)
12812	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12813					&sub_strict_overflow_p))
12814	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  /* MIN_EXPR yields one of its operands, so when both are
	     nonzero the minimum is nonzero as well.  */
	  return true;
	}
12818      break;
12819
12820    case MAX_EXPR:
12821      sub_strict_overflow_p = false;
12822      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12823				     &sub_strict_overflow_p))
12824	{
12825	  if (sub_strict_overflow_p)
12826	    *strict_overflow_p = true;
12827
12828	  /* When both operands are nonzero, then MAX must be too.  */
12829	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12830					 strict_overflow_p))
12831	    return true;
12832
12833	  /* MAX where operand 0 is positive is positive.  */
12834	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
12835					       strict_overflow_p);
12836	}
12837      /* MAX where operand 1 is positive is positive.  */
12838      else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12839					  &sub_strict_overflow_p)
12840	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
12841						 &sub_strict_overflow_p))
12842	{
12843	  if (sub_strict_overflow_p)
12844	    *strict_overflow_p = true;
12845	  return true;
12846	}
12847      break;
12848
12849    case COMPOUND_EXPR:
12850    case MODIFY_EXPR:
12851    case BIND_EXPR:
12852      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12853					strict_overflow_p);
12854
12855    case SAVE_EXPR:
12856    case NON_LVALUE_EXPR:
12857      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12858					strict_overflow_p);
12859
12860    case BIT_IOR_EXPR:
12861      return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
12862					strict_overflow_p)
12863	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
12864					    strict_overflow_p));
12865
12866    case CALL_EXPR:
12867      return alloca_call_p (t);
12868
12869    default:
12870      break;
12871    }
12872  return false;
12873}
12874
/* Return true when T is an expression known to be nonzero.
12876   Handle warnings about undefined signed overflow.  */
12877
12878bool
12879tree_expr_nonzero_p (tree t)
12880{
12881  bool ret, strict_overflow_p;
12882
12883  strict_overflow_p = false;
12884  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
12885  if (strict_overflow_p)
12886    fold_overflow_warning (("assuming signed overflow does not occur when "
12887			    "determining that expression is always "
12888			    "non-zero"),
12889			   WARN_STRICT_OVERFLOW_MISC);
12890  return ret;
12891}
12892
12893/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12894   attempt to fold the expression to a constant without modifying TYPE,
12895   OP0 or OP1.
12896
   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */
12900
12901tree
12902fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12903{
12904  tree tem = fold_binary (code, type, op0, op1);
12905  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12906}
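/* An illustrative (hypothetical) use of the routine above:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					  two, three);

   yields the INTEGER_CST 5; an operand that does not fold to a
   constant makes the routine return NULL_TREE instead.  */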
12907
12908/* Given the components of a unary expression CODE, TYPE and OP0,
12909   attempt to fold the expression to a constant without modifying
12910   TYPE or OP0.
12911
   If the expression can be simplified to a constant, then return
   the constant.  If the expression cannot be simplified to a
   constant, then return NULL_TREE.  */
12915
12916tree
12917fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12918{
12919  tree tem = fold_unary (code, type, op0);
12920  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12921}
12922
12923/* If EXP represents referencing an element in a constant string
12924   (either via pointer arithmetic or array indexing), return the
12925   tree representing the value accessed, otherwise return NULL.  */
12926
12927tree
12928fold_read_from_constant_string (tree exp)
12929{
12930  if ((TREE_CODE (exp) == INDIRECT_REF
12931       || TREE_CODE (exp) == ARRAY_REF)
12932      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
12933    {
12934      tree exp1 = TREE_OPERAND (exp, 0);
12935      tree index;
12936      tree string;
12937
12938      if (TREE_CODE (exp) == INDIRECT_REF)
12939	string = string_constant (exp1, &index);
12940      else
12941	{
12942	  tree low_bound = array_ref_low_bound (exp);
12943	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12944
12945	  /* Optimize the special-case of a zero lower bound.
12946
12947	     We convert the low_bound to sizetype to avoid some problems
12948	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
12952	  if (! integer_zerop (low_bound))
12953	    index = size_diffop (index, fold_convert (sizetype, low_bound));
12954
12955	  string = exp1;
12956	}
12957
12958      if (string
12959	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12960	  && TREE_CODE (string) == STRING_CST
12961	  && TREE_CODE (index) == INTEGER_CST
12962	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12963	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12964	      == MODE_INT)
12965	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12966	return fold_convert (TREE_TYPE (exp),
12967			     build_int_cst (NULL_TREE,
12968					    (TREE_STRING_POINTER (string)
12969					     [TREE_INT_CST_LOW (index)])));
12970    }
12971  return NULL;
12972}
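/* For example: for an EXP representing "abc"[1], the routine above
   returns an INTEGER_CST holding the value of 'b'; for a non-constant
   string, a non-constant index, or an index past the string's length,
   it returns NULL.  */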
12973
12974/* Return the tree for neg (ARG0) when ARG0 is known to be either
12975   an integer constant or real constant.
12976
12977   TYPE is the type of the result.  */
12978
12979static tree
12980fold_negate_const (tree arg0, tree type)
12981{
12982  tree t = NULL_TREE;
12983
12984  switch (TREE_CODE (arg0))
12985    {
12986    case INTEGER_CST:
12987      {
12988	unsigned HOST_WIDE_INT low;
12989	HOST_WIDE_INT high;
12990	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12991				   TREE_INT_CST_HIGH (arg0),
12992				   &low, &high);
12993	t = build_int_cst_wide (type, low, high);
12994	t = force_fit_type (t, 1,
12995			    (overflow | TREE_OVERFLOW (arg0))
12996			    && !TYPE_UNSIGNED (type),
12997			    TREE_CONSTANT_OVERFLOW (arg0));
12998	break;
12999      }
13000
13001    case REAL_CST:
13002      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13003      break;
13004
13005    default:
13006      gcc_unreachable ();
13007    }
13008
13009  return t;
13010}
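/* For example: negating the most negative INTEGER_CST of a signed
   32-bit type wraps back to the same value; neg_double reports the
   overflow, and the force_fit_type call above records it through
   TREE_OVERFLOW.  */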
13011
13012/* Return the tree for abs (ARG0) when ARG0 is known to be either
13013   an integer constant or real constant.
13014
13015   TYPE is the type of the result.  */
13016
13017tree
13018fold_abs_const (tree arg0, tree type)
13019{
13020  tree t = NULL_TREE;
13021
13022  switch (TREE_CODE (arg0))
13023    {
13024    case INTEGER_CST:
13025      /* If the value is unsigned, then the absolute value is
13026	 the same as the ordinary value.  */
13027      if (TYPE_UNSIGNED (type))
13028	t = arg0;
13029      /* Similarly, if the value is non-negative.  */
13030      else if (INT_CST_LT (integer_minus_one_node, arg0))
13031	t = arg0;
13032      /* If the value is negative, then the absolute value is
13033	 its negation.  */
13034      else
13035	{
13036	  unsigned HOST_WIDE_INT low;
13037	  HOST_WIDE_INT high;
13038	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13039				     TREE_INT_CST_HIGH (arg0),
13040				     &low, &high);
13041	  t = build_int_cst_wide (type, low, high);
13042	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13043			      TREE_CONSTANT_OVERFLOW (arg0));
13044	}
13045      break;
13046
13047    case REAL_CST:
13048      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13049	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13050      else
	t = arg0;
13052      break;
13053
13054    default:
13055      gcc_unreachable ();
13056    }
13057
13058  return t;
13059}
13060
13061/* Return the tree for not (ARG0) when ARG0 is known to be an integer
13062   constant.  TYPE is the type of the result.  */
13063
13064static tree
13065fold_not_const (tree arg0, tree type)
13066{
13067  tree t = NULL_TREE;
13068
13069  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13070
13071  t = build_int_cst_wide (type,
13072			  ~ TREE_INT_CST_LOW (arg0),
13073			  ~ TREE_INT_CST_HIGH (arg0));
13074  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13075		      TREE_CONSTANT_OVERFLOW (arg0));
13076
13077  return t;
13078}
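/* For example: applied to the INTEGER_CST 5 with a signed type, the
   routine above yields -6, since one's complement satisfies
   ~x == -x - 1 in two's-complement arithmetic.  */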
13079
13080/* Given CODE, a relational operator, the target type, TYPE and two
13081   constant operands OP0 and OP1, return the result of the
13082   relational operation.  If the result is not a compile time
13083   constant, then return NULL_TREE.  */
13084
13085static tree
13086fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13087{
13088  int result, invert;
13089
13090  /* From here on, the only cases we handle are when the result is
13091     known to be a constant.  */
13092
13093  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13094    {
13095      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13096      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13097
13098      /* Handle the cases where either operand is a NaN.  */
13099      if (real_isnan (c0) || real_isnan (c1))
13100	{
13101	  switch (code)
13102	    {
13103	    case EQ_EXPR:
13104	    case ORDERED_EXPR:
13105	      result = 0;
13106	      break;
13107
13108	    case NE_EXPR:
13109	    case UNORDERED_EXPR:
13110	    case UNLT_EXPR:
13111	    case UNLE_EXPR:
13112	    case UNGT_EXPR:
13113	    case UNGE_EXPR:
13114	    case UNEQ_EXPR:
13115              result = 1;
13116	      break;
13117
13118	    case LT_EXPR:
13119	    case LE_EXPR:
13120	    case GT_EXPR:
13121	    case GE_EXPR:
13122	    case LTGT_EXPR:
13123	      if (flag_trapping_math)
13124		return NULL_TREE;
13125	      result = 0;
13126	      break;
13127
13128	    default:
13129	      gcc_unreachable ();
13130	    }
13131
13132	  return constant_boolean_node (result, type);
13133	}
13134
13135      return constant_boolean_node (real_compare (code, c0, c1), type);
13136    }
13137
13138  /* Handle equality/inequality of complex constants.  */
13139  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13140    {
13141      tree rcond = fold_relational_const (code, type,
13142					  TREE_REALPART (op0),
13143					  TREE_REALPART (op1));
13144      tree icond = fold_relational_const (code, type,
13145					  TREE_IMAGPART (op0),
13146					  TREE_IMAGPART (op1));
13147      if (code == EQ_EXPR)
13148	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13149      else if (code == NE_EXPR)
13150	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13151      else
13152	return NULL_TREE;
13153    }
13154
13155  /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13156
13157     To compute GT, swap the arguments and do LT.
13158     To compute GE, do LT and invert the result.
13159     To compute LE, swap the arguments, do LT and invert the result.
13160     To compute NE, do EQ and invert the result.
13161
13162     Therefore, the code below must handle only EQ and LT.  */
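  /* For example, the constant comparison 5 >= 3 (GE_EXPR) is computed
     as the inverse of 5 < 3: LT yields 0, so GE yields 1.  */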
13163
13164  if (code == LE_EXPR || code == GT_EXPR)
13165    {
13166      tree tem = op0;
13167      op0 = op1;
13168      op1 = tem;
13169      code = swap_tree_comparison (code);
13170    }
13171
  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */
13174
13175  invert = 0;
13176  if (code == NE_EXPR || code == GE_EXPR)
13177    {
13178      invert = 1;
13179      code = invert_tree_comparison (code, false);
13180    }
13181
  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
13184  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13185    {
13186      if (code == EQ_EXPR)
13187	result = tree_int_cst_equal (op0, op1);
13188      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13189	result = INT_CST_LT_UNSIGNED (op0, op1);
13190      else
13191	result = INT_CST_LT (op0, op1);
13192    }
13193  else
13194    return NULL_TREE;
13195
13196  if (invert)
13197    result ^= 1;
13198  return constant_boolean_node (result, type);
13199}
13200
/* Build an expression for a cleanup point containing EXPR, with type TYPE.
   Don't build a cleanup point expression for an EXPR which doesn't have
   side effects.  */
13204
13205tree
13206fold_build_cleanup_point_expr (tree type, tree expr)
13207{
13208  /* If the expression does not have side effects then we don't have to wrap
13209     it with a cleanup point expression.  */
13210  if (!TREE_SIDE_EFFECTS (expr))
13211    return expr;
13212
  /* If the expression is a return, check whether the expression inside the
     return has side effects, and likewise for the right hand side of the
     modify expression inside the return.  If either has none, we don't need
     to wrap the expression in a cleanup point expression.  Note that we
     don't check the left hand side of the modify because it should always
     be a return decl.  */
13218  if (TREE_CODE (expr) == RETURN_EXPR)
13219    {
13220      tree op = TREE_OPERAND (expr, 0);
13221      if (!op || !TREE_SIDE_EFFECTS (op))
13222        return expr;
13223      op = TREE_OPERAND (op, 1);
13224      if (!TREE_SIDE_EFFECTS (op))
13225        return expr;
13226    }
13227
13228  return build1 (CLEANUP_POINT_EXPR, type, expr);
13229}
13230
13231/* Build an expression for the address of T.  Folds away INDIRECT_REF to
13232   avoid confusing the gimplify process.  */
13233
13234tree
13235build_fold_addr_expr_with_type (tree t, tree ptrtype)
13236{
13237  /* The size of the object is not relevant when talking about its address.  */
13238  if (TREE_CODE (t) == WITH_SIZE_EXPR)
13239    t = TREE_OPERAND (t, 0);
13240
  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
13242  if (TREE_CODE (t) == INDIRECT_REF
13243      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13244    {
13245      t = TREE_OPERAND (t, 0);
13246      if (TREE_TYPE (t) != ptrtype)
13247	t = build1 (NOP_EXPR, ptrtype, t);
13248    }
13249  else
13250    {
13251      tree base = t;
13252
13253      while (handled_component_p (base))
13254	base = TREE_OPERAND (base, 0);
13255      if (DECL_P (base))
13256	TREE_ADDRESSABLE (base) = 1;
13257
13258      t = build1 (ADDR_EXPR, ptrtype, t);
13259    }
13260
13261  return t;
13262}
13263
13264tree
13265build_fold_addr_expr (tree t)
13266{
13267  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13268}
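/* An illustrative (hypothetical) use: applied to an INDIRECT_REF *p,
   build_fold_addr_expr folds &*p back to p (adding a NOP_EXPR cast if
   the pointer types differ) instead of building a new ADDR_EXPR.  */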
13269
13270/* Given a pointer value OP0 and a type TYPE, return a simplified version
13271   of an indirection through OP0, or NULL_TREE if no simplification is
13272   possible.  */
13273
13274tree
13275fold_indirect_ref_1 (tree type, tree op0)
13276{
13277  tree sub = op0;
13278  tree subtype;
13279
13280  STRIP_NOPS (sub);
13281  subtype = TREE_TYPE (sub);
13282  if (!POINTER_TYPE_P (subtype))
13283    return NULL_TREE;
13284
13285  if (TREE_CODE (sub) == ADDR_EXPR)
13286    {
13287      tree op = TREE_OPERAND (sub, 0);
13288      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL => the value of the const decl.  */
13290      if (TREE_CODE (op) == CONST_DECL)
13291	return DECL_INITIAL (op);
13292      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
13293      if (type == optype)
13294	{
13295	  tree fop = fold_read_from_constant_string (op);
13296	  if (fop)
13297	    return fop;
13298	  else
13299	    return op;
13300	}
13301      /* *(foo *)&fooarray => fooarray[0] */
13302      else if (TREE_CODE (optype) == ARRAY_TYPE
13303	       && type == TREE_TYPE (optype))
13304	{
13305	  tree type_domain = TYPE_DOMAIN (optype);
13306	  tree min_val = size_zero_node;
13307	  if (type_domain && TYPE_MIN_VALUE (type_domain))
13308	    min_val = TYPE_MIN_VALUE (type_domain);
13309	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13310	}
13311      /* *(foo *)&complexfoo => __real__ complexfoo */
13312      else if (TREE_CODE (optype) == COMPLEX_TYPE
13313	       && type == TREE_TYPE (optype))
13314	return fold_build1 (REALPART_EXPR, type, op);
13315    }
13316
13317  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13318  if (TREE_CODE (sub) == PLUS_EXPR
13319      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13320    {
13321      tree op00 = TREE_OPERAND (sub, 0);
13322      tree op01 = TREE_OPERAND (sub, 1);
13323      tree op00type;
13324
13325      STRIP_NOPS (op00);
13326      op00type = TREE_TYPE (op00);
13327      if (TREE_CODE (op00) == ADDR_EXPR
13328 	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13329	  && type == TREE_TYPE (TREE_TYPE (op00type)))
13330	{
13331	  tree size = TYPE_SIZE_UNIT (type);
13332	  if (tree_int_cst_equal (size, op01))
13333	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13334	}
13335    }
13336
13337  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13338  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13339      && type == TREE_TYPE (TREE_TYPE (subtype)))
13340    {
13341      tree type_domain;
13342      tree min_val = size_zero_node;
13343      sub = build_fold_indirect_ref (sub);
13344      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13345      if (type_domain && TYPE_MIN_VALUE (type_domain))
13346	min_val = TYPE_MIN_VALUE (type_domain);
13347      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13348    }
13349
13350  return NULL_TREE;
13351}
13352
13353/* Builds an expression for an indirection through T, simplifying some
13354   cases.  */
13355
13356tree
13357build_fold_indirect_ref (tree t)
13358{
13359  tree type = TREE_TYPE (TREE_TYPE (t));
13360  tree sub = fold_indirect_ref_1 (type, t);
13361
13362  if (sub)
13363    return sub;
13364  else
13365    return build1 (INDIRECT_REF, type, t);
13366}
13367
13368/* Given an INDIRECT_REF T, return either T or a simplified version.  */
13369
13370tree
13371fold_indirect_ref (tree t)
13372{
13373  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13374
13375  if (sub)
13376    return sub;
13377  else
13378    return t;
13379}
13380
13381/* Strip non-trapping, non-side-effecting tree nodes from an expression
13382   whose result is ignored.  The type of the returned tree need not be
13383   the same as the original expression.  */
13384
13385tree
13386fold_ignored_result (tree t)
13387{
13388  if (!TREE_SIDE_EFFECTS (t))
13389    return integer_zero_node;
13390
13391  for (;;)
13392    switch (TREE_CODE_CLASS (TREE_CODE (t)))
13393      {
13394      case tcc_unary:
13395	t = TREE_OPERAND (t, 0);
13396	break;
13397
13398      case tcc_binary:
13399      case tcc_comparison:
13400	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13401	  t = TREE_OPERAND (t, 0);
13402	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13403	  t = TREE_OPERAND (t, 1);
13404	else
13405	  return t;
13406	break;
13407
13408      case tcc_expression:
13409	switch (TREE_CODE (t))
13410	  {
13411	  case COMPOUND_EXPR:
13412	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13413	      return t;
13414	    t = TREE_OPERAND (t, 0);
13415	    break;
13416
13417	  case COND_EXPR:
13418	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13419		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13420	      return t;
13421	    t = TREE_OPERAND (t, 0);
13422	    break;
13423
13424	  default:
13425	    return t;
13426	  }
13427	break;
13428
13429      default:
13430	return t;
13431      }
13432}
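/* For example: for the ignored expression x + f (), only the call can
   matter, so the routine above reduces it to f () alone; an expression
   with no side effects at all is reduced to integer_zero_node.  */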
13433
13434/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13435   This can only be applied to objects of a sizetype.  */
13436
13437tree
13438round_up (tree value, int divisor)
13439{
13440  tree div = NULL_TREE;
13441
13442  gcc_assert (divisor > 0);
13443  if (divisor == 1)
13444    return value;
13445
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant this check is more expensive than simply
     performing the rounding.  */
13450  if (TREE_CODE (value) != INTEGER_CST)
13451    {
13452      div = build_int_cst (TREE_TYPE (value), divisor);
13453
13454      if (multiple_of_p (TREE_TYPE (value), value, div))
13455	return value;
13456    }
13457
13458  /* If divisor is a power of two, simplify this to bit manipulation.  */
13459  if (divisor == (divisor & -divisor))
13460    {
13461      tree t;
13462
13463      t = build_int_cst (TREE_TYPE (value), divisor - 1);
13464      value = size_binop (PLUS_EXPR, value, t);
13465      t = build_int_cst (TREE_TYPE (value), -divisor);
13466      value = size_binop (BIT_AND_EXPR, value, t);
13467    }
13468  else
13469    {
13470      if (!div)
13471	div = build_int_cst (TREE_TYPE (value), divisor);
13472      value = size_binop (CEIL_DIV_EXPR, value, div);
13473      value = size_binop (MULT_EXPR, value, div);
13474    }
13475
13476  return value;
13477}
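/* Worked example: round_up (value, 8) with VALUE == 37 takes the
   power-of-two path and computes (37 + 7) & -8 == 40, while
   round_up (value, 6) computes ceil (37 / 6) * 6 == 42.  */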
13478
13479/* Likewise, but round down.  */
13480
13481tree
13482round_down (tree value, int divisor)
13483{
13484  tree div = NULL_TREE;
13485
13486  gcc_assert (divisor > 0);
13487  if (divisor == 1)
13488    return value;
13489
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant this check is more expensive than simply
     performing the rounding.  */
13494  if (TREE_CODE (value) != INTEGER_CST)
13495    {
13496      div = build_int_cst (TREE_TYPE (value), divisor);
13497
13498      if (multiple_of_p (TREE_TYPE (value), value, div))
13499	return value;
13500    }
13501
13502  /* If divisor is a power of two, simplify this to bit manipulation.  */
13503  if (divisor == (divisor & -divisor))
13504    {
13505      tree t;
13506
13507      t = build_int_cst (TREE_TYPE (value), -divisor);
13508      value = size_binop (BIT_AND_EXPR, value, t);
13509    }
13510  else
13511    {
13512      if (!div)
13513	div = build_int_cst (TREE_TYPE (value), divisor);
13514      value = size_binop (FLOOR_DIV_EXPR, value, div);
13515      value = size_binop (MULT_EXPR, value, div);
13516    }
13517
13518  return value;
13519}
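/* Likewise, round_down (value, 8) with VALUE == 37 computes
   37 & -8 == 32, and round_down (value, 6) computes
   floor (37 / 6) * 6 == 36.  */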
13520
/* Returns a pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */
13524
13525static tree
13526split_address_to_core_and_offset (tree exp,
13527				  HOST_WIDE_INT *pbitpos, tree *poffset)
13528{
13529  tree core;
13530  enum machine_mode mode;
13531  int unsignedp, volatilep;
13532  HOST_WIDE_INT bitsize;
13533
13534  if (TREE_CODE (exp) == ADDR_EXPR)
13535    {
13536      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13537				  poffset, &mode, &unsignedp, &volatilep,
13538				  false);
13539      core = build_fold_addr_expr (core);
13540    }
13541  else
13542    {
13543      core = exp;
13544      *pbitpos = 0;
13545      *poffset = NULL_TREE;
13546    }
13547
13548  return core;
13549}
13550
13551/* Returns true if addresses of E1 and E2 differ by a constant, false
13552   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
13553
13554bool
13555ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13556{
13557  tree core1, core2;
13558  HOST_WIDE_INT bitpos1, bitpos2;
13559  tree toffset1, toffset2, tdiff, type;
13560
13561  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13562  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13563
13564  if (bitpos1 % BITS_PER_UNIT != 0
13565      || bitpos2 % BITS_PER_UNIT != 0
13566      || !operand_equal_p (core1, core2, 0))
13567    return false;
13568
13569  if (toffset1 && toffset2)
13570    {
13571      type = TREE_TYPE (toffset1);
13572      if (type != TREE_TYPE (toffset2))
13573	toffset2 = fold_convert (type, toffset2);
13574
13575      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13576      if (!cst_and_fits_in_hwi (tdiff))
13577	return false;
13578
13579      *diff = int_cst_value (tdiff);
13580    }
13581  else if (toffset1 || toffset2)
13582    {
13583      /* If only one of the offsets is non-constant, the difference cannot
13584	 be a constant.  */
13585      return false;
13586    }
13587  else
13588    *diff = 0;
13589
13590  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13591  return true;
13592}
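/* An illustrative (hypothetical) use: given "int a[10];" on a target
   with 32-bit int, E1 == &a[3] and E2 == &a[1] share the core &a and
   differ only in bit position, so *DIFF is set to 8 (a byte count)
   and the function returns true.  */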
13593
13594/* Simplify the floating point expression EXP when the sign of the
13595   result is not significant.  Return NULL_TREE if no simplification
13596   is possible.  */
13597
13598tree
13599fold_strip_sign_ops (tree exp)
13600{
13601  tree arg0, arg1;
13602
13603  switch (TREE_CODE (exp))
13604    {
13605    case ABS_EXPR:
13606    case NEGATE_EXPR:
13607      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13608      return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13609
13610    case MULT_EXPR:
13611    case RDIV_EXPR:
13612      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13613	return NULL_TREE;
13614      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13615      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13616      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13617	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13618			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
13619			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
13620      break;
13621
13622    default:
13623      break;
13624    }
13625  return NULL_TREE;
13626}
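/* For example: when only the magnitude of the result matters (say the
   expression is the argument of fabs), the routine above rewrites
   -x * y as x * y and strips a toplevel ABS_EXPR or NEGATE_EXPR
   outright, returning NULL_TREE when there is nothing to strip.  */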
13627
13628