1/* If-conversion support.
2   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006
3   Free Software Foundation, Inc.
4
5   This file is part of GCC.
6
7   GCC is free software; you can redistribute it and/or modify it
8   under the terms of the GNU General Public License as published by
9   the Free Software Foundation; either version 2, or (at your option)
10   any later version.
11
12   GCC is distributed in the hope that it will be useful, but WITHOUT
13   ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
14   or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public
15   License for more details.
16
17   You should have received a copy of the GNU General Public License
18   along with GCC; see the file COPYING.  If not, write to the Free
19   Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20   02110-1301, USA.  */
21
22#include "config.h"
23#include "system.h"
24#include "coretypes.h"
25#include "tm.h"
26
27#include "rtl.h"
28#include "regs.h"
29#include "function.h"
30#include "flags.h"
31#include "insn-config.h"
32#include "recog.h"
33#include "except.h"
34#include "hard-reg-set.h"
35#include "basic-block.h"
36#include "expr.h"
37#include "real.h"
38#include "output.h"
39#include "optabs.h"
40#include "toplev.h"
41#include "tm_p.h"
42#include "cfgloop.h"
43#include "target.h"
44#include "timevar.h"
45#include "tree-pass.h"
46
47
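/* Provide default (zero) definitions for any optional named patterns the
   target does not supply, so that they can be tested uniformly below.  */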
48#ifndef HAVE_conditional_execution
49#define HAVE_conditional_execution 0
50#endif
51#ifndef HAVE_conditional_move
52#define HAVE_conditional_move 0
53#endif
54#ifndef HAVE_incscc
55#define HAVE_incscc 0
56#endif
57#ifndef HAVE_decscc
58#define HAVE_decscc 0
59#endif
60#ifndef HAVE_trap
61#define HAVE_trap 0
62#endif
63#ifndef HAVE_conditional_trap
64#define HAVE_conditional_trap 0
65#endif
66
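/* Unless the target overrides it, allow at most BRANCH_COST + 1 insns per
   arm to be converted; the limit is doubled for IF-THEN-ELSE blocks (see
   cond_exec_process_if_block).  */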
67#ifndef MAX_CONDITIONAL_EXECUTE
68#define MAX_CONDITIONAL_EXECUTE   (BRANCH_COST + 1)
69#endif
70
71#define NULL_BLOCK	((basic_block) NULL)
72
/* # of IF-THEN or IF-THEN-ELSE blocks we looked at.  */
74static int num_possible_if_blocks;
75
/* # of IF-THEN or IF-THEN-ELSE blocks that were converted to conditional
   execution.  */
78static int num_updated_if_blocks;
79
80/* # of changes made which require life information to be updated.  */
81static int num_true_changes;
82
83/* Whether conditional execution changes were made.  */
84static int cond_exec_changed_p;
85
86/* True if life data ok at present.  */
87static bool life_data_ok;
88
89/* Forward references.  */
90static int count_bb_insns (basic_block);
91static bool cheap_bb_rtx_cost_p (basic_block, int);
92static rtx first_active_insn (basic_block);
93static rtx last_active_insn (basic_block, int);
94static basic_block block_fallthru (basic_block);
95static int cond_exec_process_insns (ce_if_block_t *, rtx, rtx, rtx, rtx, int);
96static rtx cond_exec_get_condition (rtx);
97static int cond_exec_process_if_block (ce_if_block_t *, int);
98static rtx noce_get_condition (rtx, rtx *);
99static int noce_operand_ok (rtx);
100static int noce_process_if_block (ce_if_block_t *);
101static int process_if_block (ce_if_block_t *);
102static void merge_if_block (ce_if_block_t *);
103static int find_cond_trap (basic_block, edge, edge);
104static basic_block find_if_header (basic_block, int);
105static int block_jumps_and_fallthru_p (basic_block, basic_block);
106static int find_if_block (ce_if_block_t *);
107static int find_if_case_1 (basic_block, edge, edge);
108static int find_if_case_2 (basic_block, edge, edge);
109static int find_memory (rtx *, void *);
110static int dead_or_predicable (basic_block, basic_block, basic_block,
111			       basic_block, int);
112static void noce_emit_move_insn (rtx, rtx);
113static rtx block_has_only_trap (basic_block);
114
115/* Count the number of non-jump active insns in BB.  */
116
117static int
118count_bb_insns (basic_block bb)
119{
120  int count = 0;
121  rtx insn = BB_HEAD (bb);
122
123  while (1)
124    {
125      if (CALL_P (insn) || NONJUMP_INSN_P (insn))
126	count++;
127
128      if (insn == BB_END (bb))
129	break;
130      insn = NEXT_INSN (insn);
131    }
132
133  return count;
134}
135
136/* Determine whether the total insn_rtx_cost on non-jump insns in
137   basic block BB is less than MAX_COST.  This function returns
138   false if the cost of any instruction could not be estimated.  */
139
140static bool
141cheap_bb_rtx_cost_p (basic_block bb, int max_cost)
142{
143  int count = 0;
144  rtx insn = BB_HEAD (bb);
145
146  while (1)
147    {
148      if (NONJUMP_INSN_P (insn))
149	{
150	  int cost = insn_rtx_cost (PATTERN (insn));
151	  if (cost == 0)
152	    return false;
153
154	  /* If this instruction is the load or set of a "stack" register,
155	     such as a floating point register on x87, then the cost of
156	     speculatively executing this insn may need to include
157	     the additional cost of popping its result off of the
158	     register stack.  Unfortunately, correctly recognizing and
159	     accounting for this additional overhead is tricky, so for
160	     now we simply prohibit such speculative execution.  */
161#ifdef STACK_REGS
162	  {
163	    rtx set = single_set (insn);
164	    if (set && STACK_REG_P (SET_DEST (set)))
165	      return false;
166	  }
167#endif
168
169	  count += cost;
170	  if (count >= max_cost)
171	    return false;
172	}
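      /* A block containing a call is never considered cheap.  */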
173      else if (CALL_P (insn))
174	return false;
175
176      if (insn == BB_END (bb))
177	break;
178      insn = NEXT_INSN (insn);
179    }
180
181  return true;
182}
183
184/* Return the first non-jump active insn in the basic block.  */
185
186static rtx
187first_active_insn (basic_block bb)
188{
189  rtx insn = BB_HEAD (bb);
190
191  if (LABEL_P (insn))
192    {
193      if (insn == BB_END (bb))
194	return NULL_RTX;
195      insn = NEXT_INSN (insn);
196    }
197
198  while (NOTE_P (insn))
199    {
200      if (insn == BB_END (bb))
201	return NULL_RTX;
202      insn = NEXT_INSN (insn);
203    }
204
205  if (JUMP_P (insn))
206    return NULL_RTX;
207
208  return insn;
209}
210
/* Return the last active (non-jump) insn in the basic block.  */
212
213static rtx
214last_active_insn (basic_block bb, int skip_use_p)
215{
216  rtx insn = BB_END (bb);
217  rtx head = BB_HEAD (bb);
218
219  while (NOTE_P (insn)
220	 || JUMP_P (insn)
221	 || (skip_use_p
222	     && NONJUMP_INSN_P (insn)
223	     && GET_CODE (PATTERN (insn)) == USE))
224    {
225      if (insn == head)
226	return NULL_RTX;
227      insn = PREV_INSN (insn);
228    }
229
230  if (LABEL_P (insn))
231    return NULL_RTX;
232
233  return insn;
234}
235
/* Return the basic block reached by falling through the basic block BB.  */
237
238static basic_block
239block_fallthru (basic_block bb)
240{
241  edge e;
242  edge_iterator ei;
243
244  FOR_EACH_EDGE (e, ei, bb->succs)
245    if (e->flags & EDGE_FALLTHRU)
246      break;
247
248  return (e) ? e->dest : NULL_BLOCK;
249}
250
251/* Go through a bunch of insns, converting them to conditional
252   execution format if possible.  Return TRUE if all of the non-note
253   insns were processed.  */
254
255static int
256cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED,
257			 /* if block information */rtx start,
258			 /* first insn to look at */rtx end,
259			 /* last insn to look at */rtx test,
260			 /* conditional execution test */rtx prob_val,
261			 /* probability of branch taken. */int mod_ok)
262{
263  int must_be_last = FALSE;
264  rtx insn;
265  rtx xtest;
266  rtx pattern;
267
268  if (!start || !end)
269    return FALSE;
270
271  for (insn = start; ; insn = NEXT_INSN (insn))
272    {
273      if (NOTE_P (insn))
274	goto insn_done;
275
      gcc_assert (NONJUMP_INSN_P (insn) || CALL_P (insn));
277
278      /* Remove USE insns that get in the way.  */
279      if (reload_completed && GET_CODE (PATTERN (insn)) == USE)
280	{
281	  /* ??? Ug.  Actually unlinking the thing is problematic,
282	     given what we'd have to coordinate with our callers.  */
283	  SET_INSN_DELETED (insn);
284	  goto insn_done;
285	}
286
287      /* Last insn wasn't last?  */
288      if (must_be_last)
289	return FALSE;
290
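      /* An insn that modifies the test may be converted only if MOD_OK
	 allows it, and even then it must be the last insn we convert;
	 any later insn would be predicated on the clobbered condition.  */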
291      if (modified_in_p (test, insn))
292	{
293	  if (!mod_ok)
294	    return FALSE;
295	  must_be_last = TRUE;
296	}
297
298      /* Now build the conditional form of the instruction.  */
299      pattern = PATTERN (insn);
300      xtest = copy_rtx (test);
301
302      /* If this is already a COND_EXEC, rewrite the test to be an AND of the
303         two conditions.  */
304      if (GET_CODE (pattern) == COND_EXEC)
305	{
306	  if (GET_MODE (xtest) != GET_MODE (COND_EXEC_TEST (pattern)))
307	    return FALSE;
308
309	  xtest = gen_rtx_AND (GET_MODE (xtest), xtest,
310			       COND_EXEC_TEST (pattern));
311	  pattern = COND_EXEC_CODE (pattern);
312	}
313
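      /* Wrap the (possibly combined) pattern in a COND_EXEC guarded by
	 XTEST.  For example, (set (reg 0) (reg 1)) guarded by
	 (ne (reg 100) (const_int 0)) becomes
	 (cond_exec (ne (reg 100) (const_int 0)) (set (reg 0) (reg 1))).  */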
314      pattern = gen_rtx_COND_EXEC (VOIDmode, xtest, pattern);
315
316      /* If the machine needs to modify the insn being conditionally executed,
317         say for example to force a constant integer operand into a temp
318         register, do so here.  */
319#ifdef IFCVT_MODIFY_INSN
320      IFCVT_MODIFY_INSN (ce_info, pattern, insn);
321      if (! pattern)
322	return FALSE;
323#endif
324
325      validate_change (insn, &PATTERN (insn), pattern, 1);
326
327      if (CALL_P (insn) && prob_val)
328	validate_change (insn, &REG_NOTES (insn),
329			 alloc_EXPR_LIST (REG_BR_PROB, prob_val,
330					  REG_NOTES (insn)), 1);
331
332    insn_done:
333      if (insn == end)
334	break;
335    }
336
337  return TRUE;
338}
339
340/* Return the condition for a jump.  Do not do any special processing.  */
341
342static rtx
343cond_exec_get_condition (rtx jump)
344{
345  rtx test_if, cond;
346
347  if (any_condjump_p (jump))
348    test_if = SET_SRC (pc_set (jump));
349  else
350    return NULL_RTX;
351  cond = XEXP (test_if, 0);
352
353  /* If this branches to JUMP_LABEL when the condition is false,
354     reverse the condition.  */
355  if (GET_CODE (XEXP (test_if, 2)) == LABEL_REF
356      && XEXP (XEXP (test_if, 2), 0) == JUMP_LABEL (jump))
357    {
358      enum rtx_code rev = reversed_comparison_code (cond, jump);
359      if (rev == UNKNOWN)
360	return NULL_RTX;
361
362      cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
363			     XEXP (cond, 1));
364    }
365
366  return cond;
367}
368
369/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
370   to conditional execution.  Return TRUE if we were successful at
371   converting the block.  */
372
373static int
374cond_exec_process_if_block (ce_if_block_t * ce_info,
375			    /* if block information */int do_multiple_p)
376{
377  basic_block test_bb = ce_info->test_bb;	/* last test block */
378  basic_block then_bb = ce_info->then_bb;	/* THEN */
379  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
380  rtx test_expr;		/* expression in IF_THEN_ELSE that is tested */
381  rtx then_start;		/* first insn in THEN block */
  rtx then_end;			/* last insn in THEN block */
383  rtx else_start = NULL_RTX;	/* first insn in ELSE block or NULL */
  rtx else_end = NULL_RTX;	/* last insn in ELSE block */
385  int max;			/* max # of insns to convert.  */
386  int then_mod_ok;		/* whether conditional mods are ok in THEN */
387  rtx true_expr;		/* test for else block insns */
388  rtx false_expr;		/* test for then block insns */
389  rtx true_prob_val;		/* probability of else block */
390  rtx false_prob_val;		/* probability of then block */
391  int n_insns;
392  enum rtx_code false_code;
393
394  /* If test is comprised of && or || elements, and we've failed at handling
395     all of them together, just use the last test if it is the special case of
396     && elements without an ELSE block.  */
397  if (!do_multiple_p && ce_info->num_multiple_test_blocks)
398    {
399      if (else_bb || ! ce_info->and_and_p)
400	return FALSE;
401
402      ce_info->test_bb = test_bb = ce_info->last_test_bb;
403      ce_info->num_multiple_test_blocks = 0;
404      ce_info->num_and_and_blocks = 0;
405      ce_info->num_or_or_blocks = 0;
406    }
407
408  /* Find the conditional jump to the ELSE or JOIN part, and isolate
409     the test.  */
410  test_expr = cond_exec_get_condition (BB_END (test_bb));
411  if (! test_expr)
412    return FALSE;
413
  /* If the conditional jump is more than just a conditional jump,
     then we cannot do conditional execution conversion on this block.  */
416  if (! onlyjump_p (BB_END (test_bb)))
417    return FALSE;
418
419  /* Collect the bounds of where we're to search, skipping any labels, jumps
420     and notes at the beginning and end of the block.  Then count the total
421     number of insns and see if it is small enough to convert.  */
422  then_start = first_active_insn (then_bb);
423  then_end = last_active_insn (then_bb, TRUE);
424  n_insns = ce_info->num_then_insns = count_bb_insns (then_bb);
425  max = MAX_CONDITIONAL_EXECUTE;
426
427  if (else_bb)
428    {
429      max *= 2;
430      else_start = first_active_insn (else_bb);
431      else_end = last_active_insn (else_bb, TRUE);
432      n_insns += ce_info->num_else_insns = count_bb_insns (else_bb);
433    }
434
435  if (n_insns > max)
436    return FALSE;
437
438  /* Map test_expr/test_jump into the appropriate MD tests to use on
439     the conditionally executed code.  */
440
441  true_expr = test_expr;
442
443  false_code = reversed_comparison_code (true_expr, BB_END (test_bb));
444  if (false_code != UNKNOWN)
445    false_expr = gen_rtx_fmt_ee (false_code, GET_MODE (true_expr),
446				 XEXP (true_expr, 0), XEXP (true_expr, 1));
447  else
448    false_expr = NULL_RTX;
449
450#ifdef IFCVT_MODIFY_TESTS
451  /* If the machine description needs to modify the tests, such as setting a
452     conditional execution register from a comparison, it can do so here.  */
453  IFCVT_MODIFY_TESTS (ce_info, true_expr, false_expr);
454
455  /* See if the conversion failed.  */
456  if (!true_expr || !false_expr)
457    goto fail;
458#endif
459
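  /* The REG_BR_PROB note on the jump gives the probability, out of
     REG_BR_PROB_BASE, that the branch is taken; its complement is used for
     insns predicated on the fall-through (THEN) condition.  */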
460  true_prob_val = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
461  if (true_prob_val)
462    {
463      true_prob_val = XEXP (true_prob_val, 0);
464      false_prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (true_prob_val));
465    }
466  else
467    false_prob_val = NULL_RTX;
468
469  /* If we have && or || tests, do them here.  These tests are in the adjacent
470     blocks after the first block containing the test.  */
471  if (ce_info->num_multiple_test_blocks > 0)
472    {
473      basic_block bb = test_bb;
474      basic_block last_test_bb = ce_info->last_test_bb;
475
476      if (! false_expr)
477	goto fail;
478
479      do
480	{
481	  rtx start, end;
482	  rtx t, f;
483	  enum rtx_code f_code;
484
485	  bb = block_fallthru (bb);
486	  start = first_active_insn (bb);
487	  end = last_active_insn (bb, TRUE);
488	  if (start
489	      && ! cond_exec_process_insns (ce_info, start, end, false_expr,
490					    false_prob_val, FALSE))
491	    goto fail;
492
	  /* If the conditional jump is more than just a conditional jump, then
	     we cannot do conditional execution conversion on this block.  */
495	  if (! onlyjump_p (BB_END (bb)))
496	    goto fail;
497
498	  /* Find the conditional jump and isolate the test.  */
499	  t = cond_exec_get_condition (BB_END (bb));
500	  if (! t)
501	    goto fail;
502
503	  f_code = reversed_comparison_code (t, BB_END (bb));
504	  if (f_code == UNKNOWN)
505	    goto fail;
506
507	  f = gen_rtx_fmt_ee (f_code, GET_MODE (t), XEXP (t, 0), XEXP (t, 1));
508	  if (ce_info->and_and_p)
509	    {
510	      t = gen_rtx_AND (GET_MODE (t), true_expr, t);
511	      f = gen_rtx_IOR (GET_MODE (t), false_expr, f);
512	    }
513	  else
514	    {
515	      t = gen_rtx_IOR (GET_MODE (t), true_expr, t);
516	      f = gen_rtx_AND (GET_MODE (t), false_expr, f);
517	    }
518
519	  /* If the machine description needs to modify the tests, such as
520	     setting a conditional execution register from a comparison, it can
521	     do so here.  */
522#ifdef IFCVT_MODIFY_MULTIPLE_TESTS
523	  IFCVT_MODIFY_MULTIPLE_TESTS (ce_info, bb, t, f);
524
525	  /* See if the conversion failed.  */
526	  if (!t || !f)
527	    goto fail;
528#endif
529
530	  true_expr = t;
531	  false_expr = f;
532	}
533      while (bb != last_test_bb);
534    }
535
  /* For IF-THEN-ELSE blocks, we don't allow modifications of the test
     in the THEN block, since the ELSE block's insns, which follow in the
     merged block, are still predicated on that test.  */
538  then_mod_ok = (else_bb == NULL_BLOCK);
539
540  /* Go through the THEN and ELSE blocks converting the insns if possible
541     to conditional execution.  */
542
543  if (then_end
544      && (! false_expr
545	  || ! cond_exec_process_insns (ce_info, then_start, then_end,
546					false_expr, false_prob_val,
547					then_mod_ok)))
548    goto fail;
549
550  if (else_bb && else_end
551      && ! cond_exec_process_insns (ce_info, else_start, else_end,
552				    true_expr, true_prob_val, TRUE))
553    goto fail;
554
555  /* If we cannot apply the changes, fail.  Do not go through the normal fail
556     processing, since apply_change_group will call cancel_changes.  */
557  if (! apply_change_group ())
558    {
559#ifdef IFCVT_MODIFY_CANCEL
560      /* Cancel any machine dependent changes.  */
561      IFCVT_MODIFY_CANCEL (ce_info);
562#endif
563      return FALSE;
564    }
565
566#ifdef IFCVT_MODIFY_FINAL
567  /* Do any machine dependent final modifications.  */
568  IFCVT_MODIFY_FINAL (ce_info);
569#endif
570
571  /* Conversion succeeded.  */
572  if (dump_file)
573    fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
574	     n_insns, (n_insns == 1) ? " was" : "s were");
575
576  /* Merge the blocks!  */
577  merge_if_block (ce_info);
578  cond_exec_changed_p = TRUE;
579  return TRUE;
580
581 fail:
582#ifdef IFCVT_MODIFY_CANCEL
583  /* Cancel any machine dependent changes.  */
584  IFCVT_MODIFY_CANCEL (ce_info);
585#endif
586
587  cancel_changes (0);
588  return FALSE;
589}
590
591/* Used by noce_process_if_block to communicate with its subroutines.
592
593   The subroutines know that A and B may be evaluated freely.  They
594   know that X is a register.  They should insert new instructions
595   before cond_earliest.  */
596
597struct noce_if_info
598{
599  basic_block test_bb;
600  rtx insn_a, insn_b;
601  rtx x, a, b;
602  rtx jump, cond, cond_earliest;
603  /* True if "b" was originally evaluated unconditionally.  */
604  bool b_unconditional;
605};
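
/* The "noce" (no conditional execution) transformations operate on
   candidates of the canonical form

	if (COND) x = a;	or	if (COND) x = a; else x = b;

   where TEST_BB ends in a conditional jump on COND, INSN_A sets X from A in
   the THEN block, and INSN_B, if present, sets X from B either in the ELSE
   block or (for the one-armed form) before the conditional jump.  */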
606
607static rtx noce_emit_store_flag (struct noce_if_info *, rtx, int, int);
608static int noce_try_move (struct noce_if_info *);
609static int noce_try_store_flag (struct noce_if_info *);
610static int noce_try_addcc (struct noce_if_info *);
611static int noce_try_store_flag_constants (struct noce_if_info *);
612static int noce_try_store_flag_mask (struct noce_if_info *);
613static rtx noce_emit_cmove (struct noce_if_info *, rtx, enum rtx_code, rtx,
614			    rtx, rtx, rtx);
615static int noce_try_cmove (struct noce_if_info *);
616static int noce_try_cmove_arith (struct noce_if_info *);
617static rtx noce_get_alt_condition (struct noce_if_info *, rtx, rtx *);
618static int noce_try_minmax (struct noce_if_info *);
619static int noce_try_abs (struct noce_if_info *);
620static int noce_try_sign_mask (struct noce_if_info *);
621
/* Helper function for noce_try_store_flag*: emit an insn sequence that
   stores the truth value of IF_INFO's condition (reversed if REVERSEP)
   in X; NORMALIZE is passed through to emit_store_flag.  */
623
624static rtx
625noce_emit_store_flag (struct noce_if_info *if_info, rtx x, int reversep,
626		      int normalize)
627{
628  rtx cond = if_info->cond;
629  int cond_complex;
630  enum rtx_code code;
631
632  cond_complex = (! general_operand (XEXP (cond, 0), VOIDmode)
633		  || ! general_operand (XEXP (cond, 1), VOIDmode));
634
635  /* If earliest == jump, or when the condition is complex, try to
636     build the store_flag insn directly.  */
637
638  if (cond_complex)
639    cond = XEXP (SET_SRC (pc_set (if_info->jump)), 0);
640
641  if (reversep)
642    code = reversed_comparison_code (cond, if_info->jump);
643  else
644    code = GET_CODE (cond);
645
646  if ((if_info->cond_earliest == if_info->jump || cond_complex)
647      && (normalize == 0 || STORE_FLAG_VALUE == normalize))
648    {
649      rtx tmp;
650
651      tmp = gen_rtx_fmt_ee (code, GET_MODE (x), XEXP (cond, 0),
652			    XEXP (cond, 1));
653      tmp = gen_rtx_SET (VOIDmode, x, tmp);
654
655      start_sequence ();
656      tmp = emit_insn (tmp);
657
658      if (recog_memoized (tmp) >= 0)
659	{
660	  tmp = get_insns ();
661	  end_sequence ();
662	  emit_insn (tmp);
663
664	  if_info->cond_earliest = if_info->jump;
665
666	  return x;
667	}
668
669      end_sequence ();
670    }
671
672  /* Don't even try if the comparison operands or the mode of X are weird.  */
673  if (cond_complex || !SCALAR_INT_MODE_P (GET_MODE (x)))
674    return NULL_RTX;
675
676  return emit_store_flag (x, code, XEXP (cond, 0),
677			  XEXP (cond, 1), VOIDmode,
678			  (code == LTU || code == LEU
679			   || code == GEU || code == GTU), normalize);
680}
681
682/* Emit instruction to move an rtx, possibly into STRICT_LOW_PART.
683   X is the destination/target and Y is the value to copy.  */
684
685static void
686noce_emit_move_insn (rtx x, rtx y)
687{
688  enum machine_mode outmode;
689  rtx outer, inner;
690  int bitpos;
691
692  if (GET_CODE (x) != STRICT_LOW_PART)
693    {
694      rtx seq, insn, target;
695      optab ot;
696
697      start_sequence ();
698      /* Check that the SET_SRC is reasonable before calling emit_move_insn,
699	 otherwise construct a suitable SET pattern ourselves.  */
700      insn = (OBJECT_P (y) || CONSTANT_P (y) || GET_CODE (y) == SUBREG)
701	     ? emit_move_insn (x, y)
702	     : emit_insn (gen_rtx_SET (VOIDmode, x, y));
703      seq = get_insns ();
      end_sequence ();
705
706      if (recog_memoized (insn) <= 0)
707	{
708	  if (GET_CODE (x) == ZERO_EXTRACT)
709	    {
710	      rtx op = XEXP (x, 0);
711	      unsigned HOST_WIDE_INT size = INTVAL (XEXP (x, 1));
712	      unsigned HOST_WIDE_INT start = INTVAL (XEXP (x, 2));
713
714	      /* store_bit_field expects START to be relative to
715		 BYTES_BIG_ENDIAN and adjusts this value for machines with
716		 BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN.  In order to be able to
717		 invoke store_bit_field again it is necessary to have the START
718		 value from the first call.  */
719	      if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
720		{
721		  if (MEM_P (op))
722		    start = BITS_PER_UNIT - start - size;
723		  else
724		    {
725		      gcc_assert (REG_P (op));
726		      start = BITS_PER_WORD - start - size;
727		    }
728		}
729
730	      gcc_assert (start < (MEM_P (op) ? BITS_PER_UNIT : BITS_PER_WORD));
731	      store_bit_field (op, size, start, GET_MODE (x), y);
732	      return;
733	    }
734
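	  /* The plain SET was not recognized.  If Y is a unary or binary
	     operation, retry it through the corresponding optab with X as
	     the target, letting the backend expand whatever sequence it
	     needs.  */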
735	  switch (GET_RTX_CLASS (GET_CODE (y)))
736	    {
737	    case RTX_UNARY:
738	      ot = code_to_optab[GET_CODE (y)];
739	      if (ot)
740		{
741		  start_sequence ();
742		  target = expand_unop (GET_MODE (y), ot, XEXP (y, 0), x, 0);
743		  if (target != NULL_RTX)
744		    {
745		      if (target != x)
746			emit_move_insn (x, target);
747		      seq = get_insns ();
748		    }
749		  end_sequence ();
750		}
751	      break;
752
753	    case RTX_BIN_ARITH:
754	    case RTX_COMM_ARITH:
755	      ot = code_to_optab[GET_CODE (y)];
756	      if (ot)
757		{
758		  start_sequence ();
759		  target = expand_binop (GET_MODE (y), ot,
760					 XEXP (y, 0), XEXP (y, 1),
761					 x, 0, OPTAB_DIRECT);
762		  if (target != NULL_RTX)
763		    {
764		      if (target != x)
			emit_move_insn (x, target);
766		      seq = get_insns ();
767		    }
768		  end_sequence ();
769		}
770	      break;
771
772	    default:
773	      break;
774	    }
775	}
776
777      emit_insn (seq);
778      return;
779    }
780
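  /* X is a STRICT_LOW_PART of a SUBREG: store Y into only the bits of the
     inner register that the SUBREG refers to, leaving the rest unchanged.  */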
781  outer = XEXP (x, 0);
782  inner = XEXP (outer, 0);
783  outmode = GET_MODE (outer);
784  bitpos = SUBREG_BYTE (outer) * BITS_PER_UNIT;
785  store_bit_field (inner, GET_MODE_BITSIZE (outmode), bitpos, outmode, y);
786}
787
/* Return the sequence of instructions generated by if conversion.  This
   function calls end_sequence() to end the current stream, ensures that
   the instructions are unshared, and checks that they are recognizable
   non-jump insns.  On failure, this function returns NULL_RTX.  */
792
793static rtx
794end_ifcvt_sequence (struct noce_if_info *if_info)
795{
796  rtx insn;
797  rtx seq = get_insns ();
798
799  set_used_flags (if_info->x);
800  set_used_flags (if_info->cond);
801  unshare_all_rtl_in_chain (seq);
802  end_sequence ();
803
804  /* Make sure that all of the instructions emitted are recognizable,
805     and that we haven't introduced a new jump instruction.
806     As an exercise for the reader, build a general mechanism that
807     allows proper placement of required clobbers.  */
808  for (insn = seq; insn; insn = NEXT_INSN (insn))
809    if (JUMP_P (insn)
810	|| recog_memoized (insn) == -1)
811      return NULL_RTX;
812
813  return seq;
814}
815
816/* Convert "if (a != b) x = a; else x = b" into "x = a" and
817   "if (a == b) x = a; else x = b" into "x = b".  */
818
819static int
820noce_try_move (struct noce_if_info *if_info)
821{
822  rtx cond = if_info->cond;
823  enum rtx_code code = GET_CODE (cond);
824  rtx y, seq;
825
826  if (code != NE && code != EQ)
827    return FALSE;
828
829  /* This optimization isn't valid if either A or B could be a NaN
830     or a signed zero.  */
831  if (HONOR_NANS (GET_MODE (if_info->x))
832      || HONOR_SIGNED_ZEROS (GET_MODE (if_info->x)))
833    return FALSE;
834
  /* Check whether the operands of the comparison are A and B in
     either order.  */
837  if ((rtx_equal_p (if_info->a, XEXP (cond, 0))
838       && rtx_equal_p (if_info->b, XEXP (cond, 1)))
839      || (rtx_equal_p (if_info->a, XEXP (cond, 1))
840	  && rtx_equal_p (if_info->b, XEXP (cond, 0))))
841    {
842      y = (code == EQ) ? if_info->a : if_info->b;
843
844      /* Avoid generating the move if the source is the destination.  */
845      if (! rtx_equal_p (if_info->x, y))
846	{
847	  start_sequence ();
848	  noce_emit_move_insn (if_info->x, y);
849	  seq = end_ifcvt_sequence (if_info);
850	  if (!seq)
851	    return FALSE;
852
853	  emit_insn_before_setloc (seq, if_info->jump,
854				   INSN_LOCATOR (if_info->insn_a));
855	}
856      return TRUE;
857    }
858  return FALSE;
859}
860
861/* Convert "if (test) x = 1; else x = 0".
862
863   Only try 0 and STORE_FLAG_VALUE here.  Other combinations will be
864   tried in noce_try_store_flag_constants after noce_try_cmove has had
865   a go at the conversion.  */
866
867static int
868noce_try_store_flag (struct noce_if_info *if_info)
869{
870  int reversep;
871  rtx target, seq;
872
873  if (GET_CODE (if_info->b) == CONST_INT
874      && INTVAL (if_info->b) == STORE_FLAG_VALUE
875      && if_info->a == const0_rtx)
876    reversep = 0;
877  else if (if_info->b == const0_rtx
878	   && GET_CODE (if_info->a) == CONST_INT
879	   && INTVAL (if_info->a) == STORE_FLAG_VALUE
880	   && (reversed_comparison_code (if_info->cond, if_info->jump)
881	       != UNKNOWN))
882    reversep = 1;
883  else
884    return FALSE;
885
886  start_sequence ();
887
888  target = noce_emit_store_flag (if_info, if_info->x, reversep, 0);
889  if (target)
890    {
891      if (target != if_info->x)
892	noce_emit_move_insn (if_info->x, target);
893
894      seq = end_ifcvt_sequence (if_info);
895      if (! seq)
896	return FALSE;
897
898      emit_insn_before_setloc (seq, if_info->jump,
899			       INSN_LOCATOR (if_info->insn_a));
900      return TRUE;
901    }
902  else
903    {
904      end_sequence ();
905      return FALSE;
906    }
907}
908
909/* Convert "if (test) x = a; else x = b", for A and B constant.  */
910
911static int
912noce_try_store_flag_constants (struct noce_if_info *if_info)
913{
914  rtx target, seq;
915  int reversep;
916  HOST_WIDE_INT itrue, ifalse, diff, tmp;
917  int normalize, can_reverse;
918  enum machine_mode mode;
919
920  if (! no_new_pseudos
921      && GET_CODE (if_info->a) == CONST_INT
922      && GET_CODE (if_info->b) == CONST_INT)
923    {
924      mode = GET_MODE (if_info->x);
925      ifalse = INTVAL (if_info->a);
926      itrue = INTVAL (if_info->b);
927
      /* Make sure we can represent the difference between the two values,
	 i.e. that computing ITRUE - IFALSE in a HOST_WIDE_INT did not
	 overflow.  */
929      if ((itrue - ifalse > 0)
930	  != ((ifalse < 0) != (itrue < 0) ? ifalse < 0 : ifalse < itrue))
931	return FALSE;
932
933      diff = trunc_int_for_mode (itrue - ifalse, mode);
934
935      can_reverse = (reversed_comparison_code (if_info->cond, if_info->jump)
936		     != UNKNOWN);
937
938      reversep = 0;
939      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
940	normalize = 0;
941      else if (ifalse == 0 && exact_log2 (itrue) >= 0
942	       && (STORE_FLAG_VALUE == 1
943		   || BRANCH_COST >= 2))
944	normalize = 1;
945      else if (itrue == 0 && exact_log2 (ifalse) >= 0 && can_reverse
946	       && (STORE_FLAG_VALUE == 1 || BRANCH_COST >= 2))
947	normalize = 1, reversep = 1;
948      else if (itrue == -1
949	       && (STORE_FLAG_VALUE == -1
950		   || BRANCH_COST >= 2))
951	normalize = -1;
952      else if (ifalse == -1 && can_reverse
953	       && (STORE_FLAG_VALUE == -1 || BRANCH_COST >= 2))
954	normalize = -1, reversep = 1;
955      else if ((BRANCH_COST >= 2 && STORE_FLAG_VALUE == -1)
956	       || BRANCH_COST >= 3)
957	normalize = -1;
958      else
959	return FALSE;
960
961      if (reversep)
962	{
963	  tmp = itrue; itrue = ifalse; ifalse = tmp;
964	  diff = trunc_int_for_mode (-diff, mode);
965	}
966
967      start_sequence ();
968      target = noce_emit_store_flag (if_info, if_info->x, reversep, normalize);
969      if (! target)
970	{
971	  end_sequence ();
972	  return FALSE;
973	}
974
975      /* if (test) x = 3; else x = 4;
976	 =>   x = 3 + (test == 0);  */
977      if (diff == STORE_FLAG_VALUE || diff == -STORE_FLAG_VALUE)
978	{
979	  target = expand_simple_binop (mode,
980					(diff == STORE_FLAG_VALUE
981					 ? PLUS : MINUS),
982					GEN_INT (ifalse), target, if_info->x, 0,
983					OPTAB_WIDEN);
984	}
985
986      /* if (test) x = 8; else x = 0;
987	 =>   x = (test != 0) << 3;  */
988      else if (ifalse == 0 && (tmp = exact_log2 (itrue)) >= 0)
989	{
990	  target = expand_simple_binop (mode, ASHIFT,
991					target, GEN_INT (tmp), if_info->x, 0,
992					OPTAB_WIDEN);
993	}
994
995      /* if (test) x = -1; else x = b;
996	 =>   x = -(test != 0) | b;  */
997      else if (itrue == -1)
998	{
999	  target = expand_simple_binop (mode, IOR,
1000					target, GEN_INT (ifalse), if_info->x, 0,
1001					OPTAB_WIDEN);
1002	}
1003
1004      /* if (test) x = a; else x = b;
1005	 =>   x = (-(test != 0) & (b - a)) + a;  */
1006      else
1007	{
1008	  target = expand_simple_binop (mode, AND,
1009					target, GEN_INT (diff), if_info->x, 0,
1010					OPTAB_WIDEN);
1011	  if (target)
1012	    target = expand_simple_binop (mode, PLUS,
1013					  target, GEN_INT (ifalse),
1014					  if_info->x, 0, OPTAB_WIDEN);
1015	}
1016
1017      if (! target)
1018	{
1019	  end_sequence ();
1020	  return FALSE;
1021	}
1022
1023      if (target != if_info->x)
1024	noce_emit_move_insn (if_info->x, target);
1025
1026      seq = end_ifcvt_sequence (if_info);
1027      if (!seq)
1028	return FALSE;
1029
1030      emit_insn_before_setloc (seq, if_info->jump,
1031			       INSN_LOCATOR (if_info->insn_a));
1032      return TRUE;
1033    }
1034
1035  return FALSE;
1036}
1037
1038/* Convert "if (test) foo++" into "foo += (test != 0)", and
1039   similarly for "foo--".  */
1040
1041static int
1042noce_try_addcc (struct noce_if_info *if_info)
1043{
1044  rtx target, seq;
1045  int subtract, normalize;
1046
1047  if (! no_new_pseudos
1048      && GET_CODE (if_info->a) == PLUS
1049      && rtx_equal_p (XEXP (if_info->a, 0), if_info->b)
1050      && (reversed_comparison_code (if_info->cond, if_info->jump)
1051	  != UNKNOWN))
1052    {
1053      rtx cond = if_info->cond;
1054      enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
1055
1056      /* First try to use addcc pattern.  */
1057      if (general_operand (XEXP (cond, 0), VOIDmode)
1058	  && general_operand (XEXP (cond, 1), VOIDmode))
1059	{
1060	  start_sequence ();
1061	  target = emit_conditional_add (if_info->x, code,
1062					 XEXP (cond, 0),
1063					 XEXP (cond, 1),
1064					 VOIDmode,
1065					 if_info->b,
1066					 XEXP (if_info->a, 1),
1067					 GET_MODE (if_info->x),
1068					 (code == LTU || code == GEU
1069					  || code == LEU || code == GTU));
1070	  if (target)
1071	    {
1072	      if (target != if_info->x)
1073		noce_emit_move_insn (if_info->x, target);
1074
1075	      seq = end_ifcvt_sequence (if_info);
1076	      if (!seq)
1077		return FALSE;
1078
1079	      emit_insn_before_setloc (seq, if_info->jump,
1080				       INSN_LOCATOR (if_info->insn_a));
1081	      return TRUE;
1082	    }
1083	  end_sequence ();
1084	}
1085
1086      /* If that fails, construct conditional increment or decrement using
1087	 setcc.  */
1088      if (BRANCH_COST >= 2
1089	  && (XEXP (if_info->a, 1) == const1_rtx
1090	      || XEXP (if_info->a, 1) == constm1_rtx))
1091        {
1092	  start_sequence ();
1093	  if (STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1094	    subtract = 0, normalize = 0;
1095	  else if (-STORE_FLAG_VALUE == INTVAL (XEXP (if_info->a, 1)))
1096	    subtract = 1, normalize = 0;
1097	  else
1098	    subtract = 0, normalize = INTVAL (XEXP (if_info->a, 1));
1099
1100
1101	  target = noce_emit_store_flag (if_info,
1102					 gen_reg_rtx (GET_MODE (if_info->x)),
1103					 1, normalize);
1104
1105	  if (target)
1106	    target = expand_simple_binop (GET_MODE (if_info->x),
1107					  subtract ? MINUS : PLUS,
1108					  if_info->b, target, if_info->x,
1109					  0, OPTAB_WIDEN);
1110	  if (target)
1111	    {
1112	      if (target != if_info->x)
1113		noce_emit_move_insn (if_info->x, target);
1114
1115	      seq = end_ifcvt_sequence (if_info);
1116	      if (!seq)
1117		return FALSE;
1118
1119	      emit_insn_before_setloc (seq, if_info->jump,
1120				       INSN_LOCATOR (if_info->insn_a));
1121	      return TRUE;
1122	    }
1123	  end_sequence ();
1124	}
1125    }
1126
1127  return FALSE;
1128}
1129
/* Convert "if (test) x = 0;" to "x &= -(test == 0);", and likewise the
   reversed form "if (test) ; else x = 0;" when the condition can be
   reversed.  */
1131
1132static int
1133noce_try_store_flag_mask (struct noce_if_info *if_info)
1134{
1135  rtx target, seq;
1136  int reversep;
1137
1138  reversep = 0;
1139  if (! no_new_pseudos
1140      && (BRANCH_COST >= 2
1141	  || STORE_FLAG_VALUE == -1)
1142      && ((if_info->a == const0_rtx
1143	   && rtx_equal_p (if_info->b, if_info->x))
1144	  || ((reversep = (reversed_comparison_code (if_info->cond,
1145						     if_info->jump)
1146			   != UNKNOWN))
1147	      && if_info->b == const0_rtx
1148	      && rtx_equal_p (if_info->a, if_info->x))))
1149    {
1150      start_sequence ();
1151      target = noce_emit_store_flag (if_info,
1152				     gen_reg_rtx (GET_MODE (if_info->x)),
1153				     reversep, -1);
1154      if (target)
1155        target = expand_simple_binop (GET_MODE (if_info->x), AND,
1156				      if_info->x,
1157				      target, if_info->x, 0,
1158				      OPTAB_WIDEN);
1159
1160      if (target)
1161	{
1162	  if (target != if_info->x)
1163	    noce_emit_move_insn (if_info->x, target);
1164
1165	  seq = end_ifcvt_sequence (if_info);
1166	  if (!seq)
1167	    return FALSE;
1168
1169	  emit_insn_before_setloc (seq, if_info->jump,
1170				   INSN_LOCATOR (if_info->insn_a));
1171	  return TRUE;
1172	}
1173
1174      end_sequence ();
1175    }
1176
1177  return FALSE;
1178}
1179
/* Helper function for noce_try_cmove and noce_try_cmove_arith: emit a
   conditional move that sets X to VTRUE when CMP_A CODE CMP_B holds and
   to VFALSE otherwise.  */
1181
1182static rtx
1183noce_emit_cmove (struct noce_if_info *if_info, rtx x, enum rtx_code code,
1184		 rtx cmp_a, rtx cmp_b, rtx vfalse, rtx vtrue)
1185{
1186  /* If earliest == jump, try to build the cmove insn directly.
1187     This is helpful when combine has created some complex condition
1188     (like for alpha's cmovlbs) that we can't hope to regenerate
1189     through the normal interface.  */
1190
1191  if (if_info->cond_earliest == if_info->jump)
1192    {
1193      rtx tmp;
1194
1195      tmp = gen_rtx_fmt_ee (code, GET_MODE (if_info->cond), cmp_a, cmp_b);
1196      tmp = gen_rtx_IF_THEN_ELSE (GET_MODE (x), tmp, vtrue, vfalse);
1197      tmp = gen_rtx_SET (VOIDmode, x, tmp);
1198
1199      start_sequence ();
1200      tmp = emit_insn (tmp);
1201
1202      if (recog_memoized (tmp) >= 0)
1203	{
1204	  tmp = get_insns ();
1205	  end_sequence ();
1206	  emit_insn (tmp);
1207
1208	  return x;
1209	}
1210
1211      end_sequence ();
1212    }
1213
1214  /* Don't even try if the comparison operands are weird.  */
1215  if (! general_operand (cmp_a, GET_MODE (cmp_a))
1216      || ! general_operand (cmp_b, GET_MODE (cmp_b)))
1217    return NULL_RTX;
1218
1219#if HAVE_conditional_move
1220  return emit_conditional_move (x, code, cmp_a, cmp_b, VOIDmode,
1221				vtrue, vfalse, GET_MODE (x),
1222			        (code == LTU || code == GEU
1223				 || code == LEU || code == GTU));
1224#else
1225  /* We'll never get here, as noce_process_if_block doesn't call the
1226     functions involved.  Ifdef code, however, should be discouraged
1227     because it leads to typos in the code not selected.  However,
1228     emit_conditional_move won't exist either.  */
1229  return NULL_RTX;
1230#endif
1231}
1232
1233/* Try only simple constants and registers here.  More complex cases
1234   are handled in noce_try_cmove_arith after noce_try_store_flag_arith
1235   has had a go at it.  */
1236
1237static int
1238noce_try_cmove (struct noce_if_info *if_info)
1239{
1240  enum rtx_code code;
1241  rtx target, seq;
1242
1243  if ((CONSTANT_P (if_info->a) || register_operand (if_info->a, VOIDmode))
1244      && (CONSTANT_P (if_info->b) || register_operand (if_info->b, VOIDmode)))
1245    {
1246      start_sequence ();
1247
1248      code = GET_CODE (if_info->cond);
1249      target = noce_emit_cmove (if_info, if_info->x, code,
1250				XEXP (if_info->cond, 0),
1251				XEXP (if_info->cond, 1),
1252				if_info->a, if_info->b);
1253
1254      if (target)
1255	{
1256	  if (target != if_info->x)
1257	    noce_emit_move_insn (if_info->x, target);
1258
1259	  seq = end_ifcvt_sequence (if_info);
1260	  if (!seq)
1261	    return FALSE;
1262
1263	  emit_insn_before_setloc (seq, if_info->jump,
1264				   INSN_LOCATOR (if_info->insn_a));
1265	  return TRUE;
1266	}
1267      else
1268	{
1269	  end_sequence ();
1270	  return FALSE;
1271	}
1272    }
1273
1274  return FALSE;
1275}
1276
1277/* Try more complex cases involving conditional_move.  */
1278
1279static int
1280noce_try_cmove_arith (struct noce_if_info *if_info)
1281{
1282  rtx a = if_info->a;
1283  rtx b = if_info->b;
1284  rtx x = if_info->x;
1285  rtx orig_a, orig_b;
1286  rtx insn_a, insn_b;
1287  rtx tmp, target;
1288  int is_mem = 0;
1289  int insn_cost;
1290  enum rtx_code code;
1291
1292  /* A conditional move from two memory sources is equivalent to a
1293     conditional on their addresses followed by a load.  Don't do this
1294     early because it'll screw alias analysis.  Note that we've
1295     already checked for no side effects.  */
1296  if (! no_new_pseudos && cse_not_expected
1297      && MEM_P (a) && MEM_P (b)
1298      && BRANCH_COST >= 5)
1299    {
1300      a = XEXP (a, 0);
1301      b = XEXP (b, 0);
1302      x = gen_reg_rtx (Pmode);
1303      is_mem = 1;
1304    }
1305
1306  /* ??? We could handle this if we knew that a load from A or B could
1307     not fault.  This is also true if we've already loaded
1308     from the address along the path from ENTRY.  */
1309  else if (may_trap_p (a) || may_trap_p (b))
1310    return FALSE;
1311
1312  /* if (test) x = a + b; else x = c - d;
1313     => y = a + b;
1314        x = c - d;
1315	if (test)
1316	  x = y;
1317  */
1318
1319  code = GET_CODE (if_info->cond);
1320  insn_a = if_info->insn_a;
1321  insn_b = if_info->insn_b;
1322
1323  /* Total insn_rtx_cost should be smaller than branch cost.  Exit
1324     if insn_rtx_cost can't be estimated.  */
1325  if (insn_a)
1326    {
1327      insn_cost = insn_rtx_cost (PATTERN (insn_a));
1328      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (BRANCH_COST))
1329	return FALSE;
1330    }
  else
    insn_cost = 0;

  if (insn_b)
    {
      insn_cost += insn_rtx_cost (PATTERN (insn_b));
      if (insn_cost == 0 || insn_cost > COSTS_N_INSNS (BRANCH_COST))
	return FALSE;
    }
1341
1342  /* Possibly rearrange operands to make things come out more natural.  */
1343  if (reversed_comparison_code (if_info->cond, if_info->jump) != UNKNOWN)
1344    {
1345      int reversep = 0;
1346      if (rtx_equal_p (b, x))
1347	reversep = 1;
1348      else if (general_operand (b, GET_MODE (b)))
1349	reversep = 1;
1350
1351      if (reversep)
1352	{
1353	  code = reversed_comparison_code (if_info->cond, if_info->jump);
1354	  tmp = a, a = b, b = tmp;
1355	  tmp = insn_a, insn_a = insn_b, insn_b = tmp;
1356	}
1357    }
1358
1359  start_sequence ();
1360
1361  orig_a = a;
1362  orig_b = b;
1363
1364  /* If either operand is complex, load it into a register first.
1365     The best way to do this is to copy the original insn.  In this
1366     way we preserve any clobbers etc that the insn may have had.
1367     This is of course not possible in the IS_MEM case.  */
1368  if (! general_operand (a, GET_MODE (a)))
1369    {
1370      rtx set;
1371
1372      if (no_new_pseudos)
1373	goto end_seq_and_fail;
1374
1375      if (is_mem)
1376	{
1377	  tmp = gen_reg_rtx (GET_MODE (a));
1378	  tmp = emit_insn (gen_rtx_SET (VOIDmode, tmp, a));
1379	}
1380      else if (! insn_a)
1381	goto end_seq_and_fail;
1382      else
1383	{
1384	  a = gen_reg_rtx (GET_MODE (a));
1385	  tmp = copy_rtx (insn_a);
1386	  set = single_set (tmp);
1387	  SET_DEST (set) = a;
1388	  tmp = emit_insn (PATTERN (tmp));
1389	}
1390      if (recog_memoized (tmp) < 0)
1391	goto end_seq_and_fail;
1392    }
1393  if (! general_operand (b, GET_MODE (b)))
1394    {
1395      rtx set, last;
1396
1397      if (no_new_pseudos)
1398	goto end_seq_and_fail;
1399
1400      if (is_mem)
1401	{
1402          tmp = gen_reg_rtx (GET_MODE (b));
1403	  tmp = gen_rtx_SET (VOIDmode, tmp, b);
1404	}
1405      else if (! insn_b)
1406	goto end_seq_and_fail;
1407      else
1408	{
1409          b = gen_reg_rtx (GET_MODE (b));
1410	  tmp = copy_rtx (insn_b);
1411	  set = single_set (tmp);
1412	  SET_DEST (set) = b;
1413	  tmp = PATTERN (tmp);
1414	}
1415
1416      /* If insn to set up A clobbers any registers B depends on, try to
1417	 swap insn that sets up A with the one that sets up B.  If even
1418	 that doesn't help, punt.  */
1419      last = get_last_insn ();
1420      if (last && modified_in_p (orig_b, last))
1421	{
1422	  tmp = emit_insn_before (tmp, get_insns ());
1423	  if (modified_in_p (orig_a, tmp))
1424	    goto end_seq_and_fail;
1425	}
1426      else
1427	tmp = emit_insn (tmp);
1428
1429      if (recog_memoized (tmp) < 0)
1430	goto end_seq_and_fail;
1431    }
1432
1433  target = noce_emit_cmove (if_info, x, code, XEXP (if_info->cond, 0),
1434			    XEXP (if_info->cond, 1), a, b);
1435
1436  if (! target)
1437    goto end_seq_and_fail;
1438
  /* If we're handling memory operands (the IS_MEM case above), the cmove
     selected between the two addresses; emit the load from the chosen
     address now.  */
1440  if (is_mem)
1441    {
1442      tmp = gen_rtx_MEM (GET_MODE (if_info->x), target);
1443
1444      /* Copy over flags as appropriate.  */
1445      if (MEM_VOLATILE_P (if_info->a) || MEM_VOLATILE_P (if_info->b))
1446	MEM_VOLATILE_P (tmp) = 1;
1447      if (MEM_IN_STRUCT_P (if_info->a) && MEM_IN_STRUCT_P (if_info->b))
1448	MEM_IN_STRUCT_P (tmp) = 1;
1449      if (MEM_SCALAR_P (if_info->a) && MEM_SCALAR_P (if_info->b))
1450	MEM_SCALAR_P (tmp) = 1;
1451      if (MEM_ALIAS_SET (if_info->a) == MEM_ALIAS_SET (if_info->b))
1452	set_mem_alias_set (tmp, MEM_ALIAS_SET (if_info->a));
1453      set_mem_align (tmp,
1454		     MIN (MEM_ALIGN (if_info->a), MEM_ALIGN (if_info->b)));
1455
1456      noce_emit_move_insn (if_info->x, tmp);
1457    }
1458  else if (target != x)
1459    noce_emit_move_insn (x, target);
1460
1461  tmp = end_ifcvt_sequence (if_info);
1462  if (!tmp)
1463    return FALSE;
1464
1465  emit_insn_before_setloc (tmp, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1466  return TRUE;
1467
1468 end_seq_and_fail:
1469  end_sequence ();
1470  return FALSE;
1471}
1472
1473/* For most cases, the simplified condition we found is the best
1474   choice, but this is not the case for the min/max/abs transforms.
1475   For these we wish to know that it is A or B in the condition.  */
1476
1477static rtx
1478noce_get_alt_condition (struct noce_if_info *if_info, rtx target,
1479			rtx *earliest)
1480{
1481  rtx cond, set, insn;
1482  int reverse;
1483
1484  /* If target is already mentioned in the known condition, return it.  */
1485  if (reg_mentioned_p (target, if_info->cond))
1486    {
1487      *earliest = if_info->cond_earliest;
1488      return if_info->cond;
1489    }
1490
1491  set = pc_set (if_info->jump);
1492  cond = XEXP (SET_SRC (set), 0);
1493  reverse
1494    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
1495      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (if_info->jump);
1496
1497  /* If we're looking for a constant, try to make the conditional
1498     have that constant in it.  There are two reasons why it may
1499     not have the constant we want:
1500
1501     1. GCC may have needed to put the constant in a register, because
1502        the target can't compare directly against that constant.  For
1503        this case, we look for a SET immediately before the comparison
1504        that puts a constant in that register.
1505
1506     2. GCC may have canonicalized the conditional, for example
1507	replacing "if x < 4" with "if x <= 3".  We can undo that (or
1508	make equivalent types of changes) to get the constants we need
1509	if they're off by one in the right direction.  */
1510
1511  if (GET_CODE (target) == CONST_INT)
1512    {
1513      enum rtx_code code = GET_CODE (if_info->cond);
1514      rtx op_a = XEXP (if_info->cond, 0);
1515      rtx op_b = XEXP (if_info->cond, 1);
1516      rtx prev_insn;
1517
1518      /* First, look to see if we put a constant in a register.  */
1519      prev_insn = prev_nonnote_insn (if_info->cond_earliest);
1520      if (prev_insn
1521	  && INSN_P (prev_insn)
1522	  && GET_CODE (PATTERN (prev_insn)) == SET)
1523	{
1524	  rtx src = find_reg_equal_equiv_note (prev_insn);
1525	  if (!src)
1526	    src = SET_SRC (PATTERN (prev_insn));
1527	  if (GET_CODE (src) == CONST_INT)
1528	    {
1529	      if (rtx_equal_p (op_a, SET_DEST (PATTERN (prev_insn))))
1530		op_a = src;
1531	      else if (rtx_equal_p (op_b, SET_DEST (PATTERN (prev_insn))))
1532		op_b = src;
1533
1534	      if (GET_CODE (op_a) == CONST_INT)
1535		{
1536		  rtx tmp = op_a;
1537		  op_a = op_b;
1538		  op_b = tmp;
1539		  code = swap_condition (code);
1540		}
1541	    }
1542	}
1543
1544      /* Now, look to see if we can get the right constant by
1545	 adjusting the conditional.  */
1546      if (GET_CODE (op_b) == CONST_INT)
1547	{
1548	  HOST_WIDE_INT desired_val = INTVAL (target);
1549	  HOST_WIDE_INT actual_val = INTVAL (op_b);
1550
1551	  switch (code)
1552	    {
1553	    case LT:
1554	      if (actual_val == desired_val + 1)
1555		{
1556		  code = LE;
1557		  op_b = GEN_INT (desired_val);
1558		}
1559	      break;
1560	    case LE:
1561	      if (actual_val == desired_val - 1)
1562		{
1563		  code = LT;
1564		  op_b = GEN_INT (desired_val);
1565		}
1566	      break;
1567	    case GT:
1568	      if (actual_val == desired_val - 1)
1569		{
1570		  code = GE;
1571		  op_b = GEN_INT (desired_val);
1572		}
1573	      break;
1574	    case GE:
1575	      if (actual_val == desired_val + 1)
1576		{
1577		  code = GT;
1578		  op_b = GEN_INT (desired_val);
1579		}
1580	      break;
1581	    default:
1582	      break;
1583	    }
1584	}
1585
1586      /* If we made any changes, generate a new conditional that is
1587	 equivalent to what we started with, but has the right
1588	 constants in it.  */
1589      if (code != GET_CODE (if_info->cond)
1590	  || op_a != XEXP (if_info->cond, 0)
1591	  || op_b != XEXP (if_info->cond, 1))
1592	{
1593	  cond = gen_rtx_fmt_ee (code, GET_MODE (cond), op_a, op_b);
1594	  *earliest = if_info->cond_earliest;
1595	  return cond;
1596	}
1597    }
1598
1599  cond = canonicalize_condition (if_info->jump, cond, reverse,
1600				 earliest, target, false, true);
1601  if (! cond || ! reg_mentioned_p (target, cond))
1602    return NULL;
1603
1604  /* We almost certainly searched back to a different place.
1605     Need to re-verify correct lifetimes.  */
1606
1607  /* X may not be mentioned in the range (cond_earliest, jump].  */
1608  for (insn = if_info->jump; insn != *earliest; insn = PREV_INSN (insn))
1609    if (INSN_P (insn) && reg_overlap_mentioned_p (if_info->x, PATTERN (insn)))
1610      return NULL;
1611
1612  /* A and B may not be modified in the range [cond_earliest, jump).  */
1613  for (insn = *earliest; insn != if_info->jump; insn = NEXT_INSN (insn))
1614    if (INSN_P (insn)
1615	&& (modified_in_p (if_info->a, insn)
1616	    || modified_in_p (if_info->b, insn)))
1617      return NULL;
1618
1619  return cond;
1620}
1621
1622/* Convert "if (a < b) x = a; else x = b;" to "x = min(a, b);", etc.  */
1623
1624static int
1625noce_try_minmax (struct noce_if_info *if_info)
1626{
1627  rtx cond, earliest, target, seq;
1628  enum rtx_code code, op;
1629  int unsignedp;
1630
1631  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
1632  if (no_new_pseudos)
1633    return FALSE;
1634
1635  /* ??? Reject modes with NaNs or signed zeros since we don't know how
1636     they will be resolved with an SMIN/SMAX.  It wouldn't be too hard
1637     to get the target to tell us...  */
1638  if (HONOR_SIGNED_ZEROS (GET_MODE (if_info->x))
1639      || HONOR_NANS (GET_MODE (if_info->x)))
1640    return FALSE;
1641
1642  cond = noce_get_alt_condition (if_info, if_info->a, &earliest);
1643  if (!cond)
1644    return FALSE;
1645
1646  /* Verify the condition is of the form we expect, and canonicalize
1647     the comparison code.  */
1648  code = GET_CODE (cond);
1649  if (rtx_equal_p (XEXP (cond, 0), if_info->a))
1650    {
1651      if (! rtx_equal_p (XEXP (cond, 1), if_info->b))
1652	return FALSE;
1653    }
1654  else if (rtx_equal_p (XEXP (cond, 1), if_info->a))
1655    {
1656      if (! rtx_equal_p (XEXP (cond, 0), if_info->b))
1657	return FALSE;
1658      code = swap_condition (code);
1659    }
1660  else
1661    return FALSE;
1662
1663  /* Determine what sort of operation this is.  Note that the code is for
1664     a taken branch, so the code->operation mapping appears backwards.  */
1665  switch (code)
1666    {
1667    case LT:
1668    case LE:
1669    case UNLT:
1670    case UNLE:
1671      op = SMAX;
1672      unsignedp = 0;
1673      break;
1674    case GT:
1675    case GE:
1676    case UNGT:
1677    case UNGE:
1678      op = SMIN;
1679      unsignedp = 0;
1680      break;
1681    case LTU:
1682    case LEU:
1683      op = UMAX;
1684      unsignedp = 1;
1685      break;
1686    case GTU:
1687    case GEU:
1688      op = UMIN;
1689      unsignedp = 1;
1690      break;
1691    default:
1692      return FALSE;
1693    }
1694
1695  start_sequence ();
1696
1697  target = expand_simple_binop (GET_MODE (if_info->x), op,
1698				if_info->a, if_info->b,
1699				if_info->x, unsignedp, OPTAB_WIDEN);
1700  if (! target)
1701    {
1702      end_sequence ();
1703      return FALSE;
1704    }
1705  if (target != if_info->x)
1706    noce_emit_move_insn (if_info->x, target);
1707
1708  seq = end_ifcvt_sequence (if_info);
1709  if (!seq)
1710    return FALSE;
1711
1712  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1713  if_info->cond = cond;
1714  if_info->cond_earliest = earliest;
1715
1716  return TRUE;
1717}
1718
1719/* Convert "if (a < 0) x = -a; else x = a;" to "x = abs(a);", etc.  */
1720
1721static int
1722noce_try_abs (struct noce_if_info *if_info)
1723{
1724  rtx cond, earliest, target, seq, a, b, c;
1725  int negate;
1726
1727  /* ??? Can't guarantee that expand_binop won't create pseudos.  */
1728  if (no_new_pseudos)
1729    return FALSE;
1730
1731  /* Recognize A and B as constituting an ABS or NABS.  The canonical
1732     form is a branch around the negation, taken when the object is the
1733     first operand of a comparison against 0 that evaluates to true.  */
1734  a = if_info->a;
1735  b = if_info->b;
1736  if (GET_CODE (a) == NEG && rtx_equal_p (XEXP (a, 0), b))
1737    negate = 0;
1738  else if (GET_CODE (b) == NEG && rtx_equal_p (XEXP (b, 0), a))
1739    {
1740      c = a; a = b; b = c;
1741      negate = 1;
1742    }
1743  else
1744    return FALSE;
1745
1746  cond = noce_get_alt_condition (if_info, b, &earliest);
1747  if (!cond)
1748    return FALSE;
1749
1750  /* Verify the condition is of the form we expect.  */
1751  if (rtx_equal_p (XEXP (cond, 0), b))
1752    c = XEXP (cond, 1);
1753  else if (rtx_equal_p (XEXP (cond, 1), b))
1754    {
1755      c = XEXP (cond, 0);
1756      negate = !negate;
1757    }
1758  else
1759    return FALSE;
1760
1761  /* Verify that C is zero.  Search one step backward for a
1762     REG_EQUAL note or a simple source if necessary.  */
1763  if (REG_P (c))
1764    {
1765      rtx set, insn = prev_nonnote_insn (earliest);
1766      if (insn
1767	  && (set = single_set (insn))
1768	  && rtx_equal_p (SET_DEST (set), c))
1769	{
1770	  rtx note = find_reg_equal_equiv_note (insn);
1771	  if (note)
1772	    c = XEXP (note, 0);
1773	  else
1774	    c = SET_SRC (set);
1775	}
1776      else
1777	return FALSE;
1778    }
1779  if (MEM_P (c)
1780      && GET_CODE (XEXP (c, 0)) == SYMBOL_REF
1781      && CONSTANT_POOL_ADDRESS_P (XEXP (c, 0)))
1782    c = get_pool_constant (XEXP (c, 0));
1783
1784  /* Work around funny ideas get_condition has wrt canonicalization.
1785     Note that these rtx constants are known to be CONST_INT, and
1786     therefore imply integer comparisons.  */
1787  if (c == constm1_rtx && GET_CODE (cond) == GT)
1788    ;
1789  else if (c == const1_rtx && GET_CODE (cond) == LT)
1790    ;
1791  else if (c != CONST0_RTX (GET_MODE (b)))
1792    return FALSE;
1793
1794  /* Determine what sort of operation this is.  */
1795  switch (GET_CODE (cond))
1796    {
1797    case LT:
1798    case LE:
1799    case UNLT:
1800    case UNLE:
1801      negate = !negate;
1802      break;
1803    case GT:
1804    case GE:
1805    case UNGT:
1806    case UNGE:
1807      break;
1808    default:
1809      return FALSE;
1810    }
1811
1812  start_sequence ();
1813
1814  target = expand_abs_nojump (GET_MODE (if_info->x), b, if_info->x, 1);
1815
1816  /* ??? It's a quandary whether cmove would be better here, especially
1817     for integers.  Perhaps combine will clean things up.  */
1818  if (target && negate)
1819    target = expand_simple_unop (GET_MODE (target), NEG, target, if_info->x, 0);
1820
1821  if (! target)
1822    {
1823      end_sequence ();
1824      return FALSE;
1825    }
1826
1827  if (target != if_info->x)
1828    noce_emit_move_insn (if_info->x, target);
1829
1830  seq = end_ifcvt_sequence (if_info);
1831  if (!seq)
1832    return FALSE;
1833
1834  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1835  if_info->cond = cond;
1836  if_info->cond_earliest = earliest;
1837
1838  return TRUE;
1839}
1840
1841/* Convert "if (m < 0) x = b; else x = 0;" to "x = (m >> C) & b;".  */
1842
1843static int
1844noce_try_sign_mask (struct noce_if_info *if_info)
1845{
1846  rtx cond, t, m, c, seq;
1847  enum machine_mode mode;
1848  enum rtx_code code;
1849
1850  if (no_new_pseudos)
1851    return FALSE;
1852
1853  cond = if_info->cond;
1854  code = GET_CODE (cond);
1855  m = XEXP (cond, 0);
1856  c = XEXP (cond, 1);
1857
1858  t = NULL_RTX;
1859  if (if_info->a == const0_rtx)
1860    {
1861      if ((code == LT && c == const0_rtx)
1862	  || (code == LE && c == constm1_rtx))
1863	t = if_info->b;
1864    }
1865  else if (if_info->b == const0_rtx)
1866    {
1867      if ((code == GE && c == const0_rtx)
1868	  || (code == GT && c == constm1_rtx))
1869	t = if_info->a;
1870    }
1871
1872  if (! t || side_effects_p (t))
1873    return FALSE;
1874
1875  /* We currently don't handle different modes.  */
1876  mode = GET_MODE (t);
1877  if (GET_MODE (m) != mode)
1878    return FALSE;
1879
1880  /* This is only profitable if T is cheap, or T is unconditionally
1881     executed/evaluated in the original insn sequence.  */
1882  if (rtx_cost (t, SET) >= COSTS_N_INSNS (2)
1883      && (!if_info->b_unconditional
1884          || t != if_info->b))
1885    return FALSE;
1886
1887  start_sequence ();
1888  /* Use emit_store_flag to generate "m < 0 ? -1 : 0" instead of expanding
1889     "(signed) m >> 31" directly.  This benefits targets with specialized
1890     insns to obtain the signmask, but still uses ashr_optab otherwise.  */
1891  m = emit_store_flag (gen_reg_rtx (mode), LT, m, const0_rtx, mode, 0, -1);
1892  t = m ? expand_binop (mode, and_optab, m, t, NULL_RTX, 0, OPTAB_DIRECT)
1893	: NULL_RTX;
1894
1895  if (!t)
1896    {
1897      end_sequence ();
1898      return FALSE;
1899    }
1900
1901  noce_emit_move_insn (if_info->x, t);
1902
1903  seq = end_ifcvt_sequence (if_info);
1904  if (!seq)
1905    return FALSE;
1906
1907  emit_insn_before_setloc (seq, if_info->jump, INSN_LOCATOR (if_info->insn_a));
1908  return TRUE;
1909}
1910
1911
1912/* Optimize away "if (x & C) x |= C" and similar bit manipulation
1913   transformations.  */
1914
1915static int
1916noce_try_bitop (struct noce_if_info *if_info)
1917{
1918  rtx cond, x, a, result, seq;
1919  enum machine_mode mode;
1920  enum rtx_code code;
1921  int bitnum;
1922
1923  x = if_info->x;
1924  cond = if_info->cond;
1925  code = GET_CODE (cond);
1926
1927  /* Check for no else condition.  */
1928  if (! rtx_equal_p (x, if_info->b))
1929    return FALSE;
1930
1931  /* Check for a suitable condition.  */
1932  if (code != NE && code != EQ)
1933    return FALSE;
1934  if (XEXP (cond, 1) != const0_rtx)
1935    return FALSE;
1936  cond = XEXP (cond, 0);
1937
1938  /* ??? We could also handle AND here.  */
1939  if (GET_CODE (cond) == ZERO_EXTRACT)
1940    {
1941      if (XEXP (cond, 1) != const1_rtx
1942	  || GET_CODE (XEXP (cond, 2)) != CONST_INT
1943	  || ! rtx_equal_p (x, XEXP (cond, 0)))
1944	return FALSE;
1945      bitnum = INTVAL (XEXP (cond, 2));
1946      mode = GET_MODE (x);
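      /* With BITS_BIG_ENDIAN, ZERO_EXTRACT counts bits from the most
	 significant end; convert to the from-LSB numbering used below.  */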
1947      if (BITS_BIG_ENDIAN)
1948	bitnum = GET_MODE_BITSIZE (mode) - 1 - bitnum;
1949      if (bitnum < 0 || bitnum >= HOST_BITS_PER_WIDE_INT)
1950	return FALSE;
1951    }
1952  else
1953    return FALSE;
1954
1955  a = if_info->a;
1956  if (GET_CODE (a) == IOR || GET_CODE (a) == XOR)
1957    {
1958      /* Check for "if (X & C) x = x op C".  */
1959      if (! rtx_equal_p (x, XEXP (a, 0))
1960          || GET_CODE (XEXP (a, 1)) != CONST_INT
1961	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
1962	     != (unsigned HOST_WIDE_INT) 1 << bitnum)
1963        return FALSE;
1964
1965      /* if ((x & C) == 0) x |= C; is transformed to x |= C.   */
1966      /* if ((x & C) != 0) x |= C; is transformed to nothing.  */
1967      if (GET_CODE (a) == IOR)
1968	result = (code == NE) ? a : NULL_RTX;
1969      else if (code == NE)
1970	{
1971	  /* if ((x & C) == 0) x ^= C; is transformed to x |= C.   */
1972	  result = gen_int_mode ((HOST_WIDE_INT) 1 << bitnum, mode);
1973	  result = simplify_gen_binary (IOR, mode, x, result);
1974	}
1975      else
1976	{
1977	  /* if ((x & C) != 0) x ^= C; is transformed to x &= ~C.  */
1978	  result = gen_int_mode (~((HOST_WIDE_INT) 1 << bitnum), mode);
1979	  result = simplify_gen_binary (AND, mode, x, result);
1980	}
1981    }
1982  else if (GET_CODE (a) == AND)
1983    {
1984      /* Check for "if (X & C) x &= ~C".  */
1985      if (! rtx_equal_p (x, XEXP (a, 0))
1986	  || GET_CODE (XEXP (a, 1)) != CONST_INT
1987	  || (INTVAL (XEXP (a, 1)) & GET_MODE_MASK (mode))
1988	     != (~((HOST_WIDE_INT) 1 << bitnum) & GET_MODE_MASK (mode)))
1989        return FALSE;
1990
1991      /* if ((x & C) == 0) x &= ~C; is transformed to nothing.  */
1992      /* if ((x & C) != 0) x &= ~C; is transformed to x &= ~C.  */
1993      result = (code == EQ) ? a : NULL_RTX;
1994    }
1995  else
1996    return FALSE;
1997
1998  if (result)
1999    {
2000      start_sequence ();
2001      noce_emit_move_insn (x, result);
2002      seq = end_ifcvt_sequence (if_info);
2003      if (!seq)
2004	return FALSE;
2005
2006      emit_insn_before_setloc (seq, if_info->jump,
2007			       INSN_LOCATOR (if_info->insn_a));
2008    }
2009  return TRUE;
2010}
2011
2012
2013/* Similar to get_condition, only the resulting condition must be
2014   valid at JUMP, instead of at EARLIEST.  */
2015
2016static rtx
2017noce_get_condition (rtx jump, rtx *earliest)
2018{
2019  rtx cond, set, tmp;
2020  bool reverse;
2021
2022  if (! any_condjump_p (jump))
2023    return NULL_RTX;
2024
2025  set = pc_set (jump);
2026
2027  /* If this branches to JUMP_LABEL when the condition is false,
2028     reverse the condition.  */
2029  reverse = (GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
2030	     && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump));
2031
2032  /* If the condition variable is a register and is MODE_INT, accept it.  */
2033
2034  cond = XEXP (SET_SRC (set), 0);
2035  tmp = XEXP (cond, 0);
2036  if (REG_P (tmp) && GET_MODE_CLASS (GET_MODE (tmp)) == MODE_INT)
2037    {
2038      *earliest = jump;
2039
2040      if (reverse)
2041	cond = gen_rtx_fmt_ee (reverse_condition (GET_CODE (cond)),
2042			       GET_MODE (cond), tmp, XEXP (cond, 1));
2043      return cond;
2044    }
2045
2046  /* Otherwise, fall back on canonicalize_condition to do the dirty
2047     work of manipulating MODE_CC values and COMPARE rtx codes.  */
2048  return canonicalize_condition (jump, cond, reverse, earliest,
2049				 NULL_RTX, false, true);
2050}
2051
2052/* Initialize for a simple IF-THEN or IF-THEN-ELSE block.  We will not
2053   be using conditional execution.  Set some fields of IF_INFO based
2054   on CE_INFO: test_bb, cond, jump, cond_earliest.  Return TRUE if
2055   things look OK.  */
2056
2057static int
2058noce_init_if_info (struct ce_if_block *ce_info, struct noce_if_info *if_info)
2059{
2060  basic_block test_bb = ce_info->test_bb;
2061  rtx cond, jump;
2062
2063  /* If the test is made up of && or || elements, don't handle it unless
2064     it is the special case of && elements without an ELSE block.  */
2065  if (ce_info->num_multiple_test_blocks)
2066    {
2067      if (ce_info->else_bb || !ce_info->and_and_p)
2068	return FALSE;
2069
2070      ce_info->test_bb = test_bb = ce_info->last_test_bb;
2071      ce_info->num_multiple_test_blocks = 0;
2072      ce_info->num_and_and_blocks = 0;
2073      ce_info->num_or_or_blocks = 0;
2074    }
2075
2076  /* If this is not a standard conditional jump, we can't parse it.  */
2077  jump = BB_END (test_bb);
2078  cond = noce_get_condition (jump, &if_info->cond_earliest);
2079  if (!cond)
2080    return FALSE;
2081
2082  /* If the conditional jump is more than just a conditional
2083     jump, then we cannot do if-conversion on this block.  */
2084  if (! onlyjump_p (jump))
2085    return FALSE;
2086
2087  /* We must be comparing objects whose modes imply the size.  */
2088  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
2089    return FALSE;
2090
2091  if_info->test_bb = test_bb;
2092  if_info->cond = cond;
2093  if_info->jump = jump;
2094
2095  return TRUE;
2096}
2097
2098/* Return true if OP is ok for if-then-else processing.  */
2099
2100static int
2101noce_operand_ok (rtx op)
2102{
2103  /* We special-case memories, so handle any of them with
2104     no address side effects.  */
2105  if (MEM_P (op))
2106    return ! side_effects_p (XEXP (op, 0));
2107
2108  if (side_effects_p (op))
2109    return FALSE;
2110
2111  return ! may_trap_p (op);
2112}
2113
2114/* Return true if a write into MEM may trap or fault.  */
2115
2116static bool
2117noce_mem_write_may_trap_or_fault_p (rtx mem)
2118{
2119  rtx addr;
2120
2121  if (MEM_READONLY_P (mem))
2122    return true;
2123
2124  if (may_trap_or_fault_p (mem))
2125    return true;
2126
2127  addr = XEXP (mem, 0);
2128
2129  /* Call target hook to avoid the effects of -fpic etc....  */
2130  addr = targetm.delegitimize_address (addr);
2131
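  /* Walk down to the base of the address; a store through a label, or
     through a symbol placed in a read-only section, will surely fault.  */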
2132  while (addr)
2133    switch (GET_CODE (addr))
2134      {
2135      case CONST:
2136      case PRE_DEC:
2137      case PRE_INC:
2138      case POST_DEC:
2139      case POST_INC:
2140      case POST_MODIFY:
2141	addr = XEXP (addr, 0);
2142	break;
2143      case LO_SUM:
2144      case PRE_MODIFY:
2145	addr = XEXP (addr, 1);
2146	break;
2147      case PLUS:
2148	if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
2149	  addr = XEXP (addr, 0);
2150	else
2151	  return false;
2152	break;
2153      case LABEL_REF:
2154	return true;
2155      case SYMBOL_REF:
2156	if (SYMBOL_REF_DECL (addr)
2157	    && decl_readonly_section (SYMBOL_REF_DECL (addr), 0))
2158	  return true;
2159	return false;
2160      default:
2161	return false;
2162      }
2163
2164  return false;
2165}
2166
2167/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
2168   without using conditional execution.  Return TRUE if we were
2169   successful at converting the block.  */
2170
2171static int
2172noce_process_if_block (struct ce_if_block * ce_info)
2173{
2174  basic_block test_bb = ce_info->test_bb;	/* test block */
2175  basic_block then_bb = ce_info->then_bb;	/* THEN */
2176  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
2177  struct noce_if_info if_info;
2178  rtx insn_a, insn_b;
2179  rtx set_a, set_b;
2180  rtx orig_x, x, a, b;
2181  rtx jump, cond;
2182
2183  /* We're looking for patterns of the form
2184
2185     (1) if (...) x = a; else x = b;
2186     (2) x = b; if (...) x = a;
2187     (3) if (...) x = a;   // as if with an initial x = x.
2188
2189     The later patterns require jumps to be more expensive.
2190
2191     ??? For future expansion, look for multiple X in such patterns.  */
2192
2193  if (!noce_init_if_info (ce_info, &if_info))
2194    return FALSE;
2195
2196  cond = if_info.cond;
2197  jump = if_info.jump;
2198
2199  /* Look for one of the potential sets.  */
2200  insn_a = first_active_insn (then_bb);
2201  if (! insn_a
2202      || insn_a != last_active_insn (then_bb, FALSE)
2203      || (set_a = single_set (insn_a)) == NULL_RTX)
2204    return FALSE;
2205
2206  x = SET_DEST (set_a);
2207  a = SET_SRC (set_a);
2208
2209  /* Look for the other potential set.  Make sure we've got equivalent
2210     destinations.  */
2211  /* ??? This is overconservative.  Storing to two different mems is
2212     as easy as conditionally computing the address.  Storing to a
2213     single mem merely requires a scratch memory to use as one of the
2214     destination addresses; often the memory immediately below the
2215     stack pointer is available for this.  */
2216  set_b = NULL_RTX;
2217  if (else_bb)
2218    {
2219      insn_b = first_active_insn (else_bb);
2220      if (! insn_b
2221	  || insn_b != last_active_insn (else_bb, FALSE)
2222	  || (set_b = single_set (insn_b)) == NULL_RTX
2223	  || ! rtx_equal_p (x, SET_DEST (set_b)))
2224	return FALSE;
2225    }
2226  else
2227    {
2228      insn_b = prev_nonnote_insn (if_info.cond_earliest);
2229      /* We're going to be moving the evaluation of B down from above
2230	 COND_EARLIEST to JUMP.  Make sure the relevant data is still
2231	 intact.  */
2232      if (! insn_b
2233	  || !NONJUMP_INSN_P (insn_b)
2234	  || (set_b = single_set (insn_b)) == NULL_RTX
2235	  || ! rtx_equal_p (x, SET_DEST (set_b))
2236	  || reg_overlap_mentioned_p (x, SET_SRC (set_b))
2237	  || modified_between_p (SET_SRC (set_b),
2238				 PREV_INSN (if_info.cond_earliest), jump)
2239	  /* Likewise with X.  In particular this can happen when
2240	     noce_get_condition looks farther back in the instruction
2241	     stream than one might expect.  */
2242	  || reg_overlap_mentioned_p (x, cond)
2243	  || reg_overlap_mentioned_p (x, a)
2244	  || modified_between_p (x, PREV_INSN (if_info.cond_earliest), jump))
2245	insn_b = set_b = NULL_RTX;
2246    }
2247
2248  /* If x has side effects then only the if-then-else form is safe to
2249     convert.  But even in that case we would need to restore any notes
2250     (such as REG_INC) at the end.  That can be tricky if
2251     noce_emit_move_insn expands to more than one insn, so disable the
2252     optimization entirely for now if there are side effects.  */
2253  if (side_effects_p (x))
2254    return FALSE;
2255
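  /* If no other set was found, the implicit "else" value is X itself
     (pattern (3) above, the implied x = x).  */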
2256  b = (set_b ? SET_SRC (set_b) : x);
2257
2258  /* Only operate on register destinations, and even then avoid extending
2259     the lifetime of hard registers on small register class machines.  */
2260  orig_x = x;
2261  if (!REG_P (x)
2262      || (SMALL_REGISTER_CLASSES
2263	  && REGNO (x) < FIRST_PSEUDO_REGISTER))
2264    {
2265      if (no_new_pseudos || GET_MODE (x) == BLKmode)
2266	return FALSE;
2267
2268      if (GET_CODE (x) == ZERO_EXTRACT
2269	  && (GET_CODE (XEXP (x, 1)) != CONST_INT
2270	      || GET_CODE (XEXP (x, 2)) != CONST_INT))
2271	return FALSE;
2272
2273      x = gen_reg_rtx (GET_MODE (GET_CODE (x) == STRICT_LOW_PART
2274				 ? XEXP (x, 0) : x));
2275    }
2276
2277  /* Don't operate on sources that may trap or are volatile.  */
2278  if (! noce_operand_ok (a) || ! noce_operand_ok (b))
2279    return FALSE;
2280
2281  /* Set up the info block for our subroutines.  */
2282  if_info.insn_a = insn_a;
2283  if_info.insn_b = insn_b;
2284  if_info.x = x;
2285  if_info.a = a;
2286  if_info.b = b;
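  /* With no ELSE block, B was either X itself or computed before the
     branch, so it is evaluated unconditionally in the original code.  */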
2287  if_info.b_unconditional = else_bb == 0;
2288
2289  /* Try optimizations in some approximation of a useful order.  */
2290  /* ??? Should first look to see if X is live incoming at all.  If it
2291     isn't, we don't need anything but an unconditional set.  */
2292
2293  /* Look and see if A and B are really the same.  Avoid creating silly
2294     cmove constructs that no one will fix up later.  */
2295  if (rtx_equal_p (a, b))
2296    {
2297      /* If we have an INSN_B, we don't have to create any new rtl.  Just
2298	 move the instruction that we already have.  If we don't have an
2299	 INSN_B, that means that A == X, and we've got a noop move.  In
2300	 that case don't do anything and let the code below delete INSN_A.  */
2301      if (insn_b && else_bb)
2302	{
2303	  rtx note;
2304
2305	  if (else_bb && insn_b == BB_END (else_bb))
2306	    BB_END (else_bb) = PREV_INSN (insn_b);
2307	  reorder_insns (insn_b, insn_b, PREV_INSN (jump));
2308
2309	  /* If there was a REG_EQUAL note, delete it since it may have been
2310	     true due to this insn being after a jump.  */
2311	  if ((note = find_reg_note (insn_b, REG_EQUAL, NULL_RTX)) != 0)
2312	    remove_note (insn_b, note);
2313
2314	  insn_b = NULL_RTX;
2315	}
2316      /* If we have "x = b; if (...) x = a;", and x has side-effects, then
2317	 x must be executed twice.  */
2318      else if (insn_b && side_effects_p (orig_x))
2319	return FALSE;
2320
2321      x = orig_x;
2322      goto success;
2323    }
2324
2325  /* Disallow the "if (...) x = a;" form (with an implicit "else x = x;")
2326     for optimizations if writing to x may trap or fault, i.e. it's a memory
2327     other than a static var or a stack slot, is misaligned on strict
2328     aligned machines or is read-only.
2329     If x is a read-only memory, then the program is valid only if we
2330     avoid the store into it.  If there are stores on both the THEN and
2331     ELSE arms, then we can go ahead with the conversion; either the
2332     program is broken, or the condition is always false such that the
2333     other memory is selected.  */
2334  if (!set_b && MEM_P (orig_x) && noce_mem_write_may_trap_or_fault_p (orig_x))
2335    return FALSE;
2336
2337  if (noce_try_move (&if_info))
2338    goto success;
2339  if (noce_try_store_flag (&if_info))
2340    goto success;
2341  if (noce_try_bitop (&if_info))
2342    goto success;
2343  if (noce_try_minmax (&if_info))
2344    goto success;
2345  if (noce_try_abs (&if_info))
2346    goto success;
2347  if (HAVE_conditional_move
2348      && noce_try_cmove (&if_info))
2349    goto success;
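  /* The remaining transforms replace the branch with straight-line
     arithmetic, so they are only attempted when the target cannot do
     conditional execution.  */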
2350  if (! HAVE_conditional_execution)
2351    {
2352      if (noce_try_store_flag_constants (&if_info))
2353	goto success;
2354      if (noce_try_addcc (&if_info))
2355	goto success;
2356      if (noce_try_store_flag_mask (&if_info))
2357	goto success;
2358      if (HAVE_conditional_move
2359	  && noce_try_cmove_arith (&if_info))
2360	goto success;
2361      if (noce_try_sign_mask (&if_info))
2362	goto success;
2363    }
2364
2365  return FALSE;
2366
2367 success:
2368  /* The original sets may now be killed.  */
2369  delete_insn (insn_a);
2370
2371  /* Several special cases here: First, we may have reused insn_b above,
2372     in which case insn_b is now NULL.  Second, we want to delete insn_b
2373     if it came from the ELSE block, because it follows the now correct
2374     write that appears in the TEST block.  However, if we got insn_b from
2375     the TEST block, it may in fact be loading data needed for the comparison.
2376     We'll let life_analysis remove the insn if it's really dead.  */
2377  if (insn_b && else_bb)
2378    delete_insn (insn_b);
2379
2380  /* The new insns will have been inserted immediately before the jump.  We
2381     should be able to remove the jump with impunity, but the condition itself
2382     may have been modified by gcse to be shared across basic blocks.  */
2383  delete_insn (jump);
2384
2385  /* If we used a temporary, fix it up now.  */
2386  if (orig_x != x)
2387    {
2388      start_sequence ();
2389      noce_emit_move_insn (orig_x, x);
2390      insn_b = get_insns ();
2391      set_used_flags (orig_x);
2392      unshare_all_rtl_in_chain (insn_b);
2393      end_sequence ();
2394
2395      emit_insn_after_setloc (insn_b, BB_END (test_bb), INSN_LOCATOR (insn_a));
2396    }
2397
2398  /* Merge the blocks!  */
2399  merge_if_block (ce_info);
2400
2401  return TRUE;
2402}
2403
2404/* Check whether a block is suitable for conditional move conversion.
2405   Every insn must be a simple set of a register to a constant or a
2406   register.  For each assignment, store the value in the array VALS,
2407   indexed by register number.  COND is the condition we will
2408   test.  */
2409
2410static int
2411check_cond_move_block (basic_block bb, rtx *vals, rtx cond)
2412{
2413  rtx insn;
2414
2415  FOR_BB_INSNS (bb, insn)
2416    {
2417      rtx set, dest, src;
2418
2419      if (!INSN_P (insn) || JUMP_P (insn))
2420	continue;
2421      set = single_set (insn);
2422      if (!set)
2423	return FALSE;
2424
2425      dest = SET_DEST (set);
2426      src = SET_SRC (set);
2427      if (!REG_P (dest)
2428	  || (SMALL_REGISTER_CLASSES && HARD_REGISTER_P (dest)))
2429	return FALSE;
2430
2431      if (!CONSTANT_P (src) && !register_operand (src, VOIDmode))
2432	return FALSE;
2433
2434      if (side_effects_p (src) || side_effects_p (dest))
2435	return FALSE;
2436
2437      if (may_trap_p (src) || may_trap_p (dest))
2438	return FALSE;
2439
2440      /* Don't try to handle this if the source register was
2441	 modified earlier in the block.  */
2442      if ((REG_P (src)
2443	   && vals[REGNO (src)] != NULL)
2444	  || (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src))
2445	      && vals[REGNO (SUBREG_REG (src))] != NULL))
2446	return FALSE;
2447
2448      /* Don't try to handle this if the destination register was
2449	 modified earlier in the block.  */
2450      if (vals[REGNO (dest)] != NULL)
2451	return FALSE;
2452
2453      /* Don't try to handle this if the condition uses the
2454	 destination register.  */
2455      if (reg_overlap_mentioned_p (dest, cond))
2456	return FALSE;
2457
2458      vals[REGNO (dest)] = src;
2459
2460      /* Don't try to handle this if the source register is modified
2461	 later in the block.  */
2462      if (!CONSTANT_P (src)
2463	  && modified_between_p (src, insn, NEXT_INSN (BB_END (bb))))
2464	return FALSE;
2465    }
2466
2467  return TRUE;
2468}
2469
2470/* Given a simple IF-THEN or IF-THEN-ELSE block, attempt to convert it
2471   using only conditional moves.  Return TRUE if we were successful at
2472   converting the block.  */
2473
2474static int
2475cond_move_process_if_block (struct ce_if_block *ce_info)
2476{
2477  basic_block then_bb = ce_info->then_bb;
2478  basic_block else_bb = ce_info->else_bb;
2479  struct noce_if_info if_info;
2480  rtx jump, cond, insn, seq, cond_arg0, cond_arg1, loc_insn;
2481  int max_reg, size, c, i;
2482  rtx *then_vals;
2483  rtx *else_vals;
2484  enum rtx_code code;
2485
2486  if (!HAVE_conditional_move || no_new_pseudos)
2487    return FALSE;
2488
2489  memset (&if_info, 0, sizeof if_info);
2490
2491  if (!noce_init_if_info (ce_info, &if_info))
2492    return FALSE;
2493
2494  cond = if_info.cond;
2495  jump = if_info.jump;
2496
2497  /* Build a mapping for each block to the value used for each
2498     register.  */
2499  max_reg = max_reg_num ();
2500  size = (max_reg + 1) * sizeof (rtx);
2501  then_vals = (rtx *) alloca (size);
2502  else_vals = (rtx *) alloca (size);
2503  memset (then_vals, 0, size);
2504  memset (else_vals, 0, size);
2505
2506  /* Make sure the blocks are suitable.  */
2507  if (!check_cond_move_block (then_bb, then_vals, cond)
2508      || (else_bb && !check_cond_move_block (else_bb, else_vals, cond)))
2509    return FALSE;
2510
2511  /* Make sure the blocks can be used together.  If the same register
2512     is set in both blocks, and is not set to a constant in both
2513     cases, then both blocks must set it to the same register.  We
2514     have already verified that if it is set to a register, that the
2515     source register does not change after the assignment.  Also count
2516     the number of registers set in only one of the blocks.  */
2517  c = 0;
2518  for (i = 0; i <= max_reg; ++i)
2519    {
2520      if (!then_vals[i] && !else_vals[i])
2521	continue;
2522
2523      if (!then_vals[i] || !else_vals[i])
2524	++c;
2525      else
2526	{
2527	  if (!CONSTANT_P (then_vals[i])
2528	      && !CONSTANT_P (else_vals[i])
2529	      && !rtx_equal_p (then_vals[i], else_vals[i]))
2530	    return FALSE;
2531	}
2532    }
2533
2534  /* Make sure it is reasonable to convert this block.  What matters
2535     is the number of assignments currently made in only one of the
2536     branches, since if we convert we are going to always execute
2537     them.  */
2538  if (c > MAX_CONDITIONAL_EXECUTE)
2539    return FALSE;
2540
2541  /* Emit the conditional moves.  First do the then block, then do
2542     anything left in the else block.  */
2543
2544  code = GET_CODE (cond);
2545  cond_arg0 = XEXP (cond, 0);
2546  cond_arg1 = XEXP (cond, 1);
2547
2548  start_sequence ();
2549
2550  FOR_BB_INSNS (then_bb, insn)
2551    {
2552      rtx set, target, dest, t, e;
2553      unsigned int regno;
2554
2555      if (!INSN_P (insn) || JUMP_P (insn))
2556	continue;
2557      set = single_set (insn);
2558      gcc_assert (set && REG_P (SET_DEST (set)));
2559
2560      dest = SET_DEST (set);
2561      regno = REGNO (dest);
2562      t = then_vals[regno];
2563      e = else_vals[regno];
2564      gcc_assert (t);
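      /* A register not set in the ELSE block simply keeps its old value
	 on that path.  */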
2565      if (!e)
2566	e = dest;
2567      target = noce_emit_cmove (&if_info, dest, code, cond_arg0, cond_arg1,
2568				t, e);
2569      if (!target)
2570	{
2571	  end_sequence ();
2572	  return FALSE;
2573	}
2574
2575      if (target != dest)
2576	noce_emit_move_insn (dest, target);
2577    }
2578
2579  if (else_bb)
2580    {
2581      FOR_BB_INSNS (else_bb, insn)
2582	{
2583	  rtx set, target, dest;
2584	  unsigned int regno;
2585
2586	  if (!INSN_P (insn) || JUMP_P (insn))
2587	    continue;
2588	  set = single_set (insn);
2589	  gcc_assert (set && REG_P (SET_DEST (set)));
2590
2591	  dest = SET_DEST (set);
2592	  regno = REGNO (dest);
2593
2594	  /* If this register was set in the then block, we already
2595	     handled this case above.  */
2596	  if (then_vals[regno])
2597	    continue;
2598	  gcc_assert (else_vals[regno]);
2599
2600	  target = noce_emit_cmove (&if_info, dest, code, cond_arg0, cond_arg1,
2601				    dest, else_vals[regno]);
2602	  if (!target)
2603	    {
2604	      end_sequence ();
2605	      return FALSE;
2606	    }
2607
2608	  if (target != dest)
2609	    noce_emit_move_insn (dest, target);
2610	}
2611    }
2612
2613  seq = end_ifcvt_sequence (&if_info);
2614  if (!seq)
2615    return FALSE;
2616
2617  loc_insn = first_active_insn (then_bb);
2618  if (!loc_insn)
2619    {
2620      loc_insn = first_active_insn (else_bb);
2621      gcc_assert (loc_insn);
2622    }
2623  emit_insn_before_setloc (seq, jump, INSN_LOCATOR (loc_insn));
2624
2625  FOR_BB_INSNS (then_bb, insn)
2626    if (INSN_P (insn) && !JUMP_P (insn))
2627      delete_insn (insn);
2628  if (else_bb)
2629    {
2630      FOR_BB_INSNS (else_bb, insn)
2631	if (INSN_P (insn) && !JUMP_P (insn))
2632	  delete_insn (insn);
2633    }
2634  delete_insn (jump);
2635
2636  merge_if_block (ce_info);
2637
2638  return TRUE;
2639}
2640
2641/* Attempt to convert an IF-THEN or IF-THEN-ELSE block into
2642   straight line code.  Return true if successful.  */
2643
2644static int
2645process_if_block (struct ce_if_block * ce_info)
2646{
2647  if (! reload_completed
2648      && noce_process_if_block (ce_info))
2649    return TRUE;
2650
2651  if (HAVE_conditional_move
2652      && cond_move_process_if_block (ce_info))
2653    return TRUE;
2654
2655  if (HAVE_conditional_execution && reload_completed)
2656    {
2657      /* If we have && and || tests, try to first handle combining the && and
2658         || tests into the conditional code, and if that fails, go back and
2659         handle it without the && and ||, which at present handles the && case
2660         if there was no ELSE block.  */
2661      if (cond_exec_process_if_block (ce_info, TRUE))
2662	return TRUE;
2663
2664      if (ce_info->num_multiple_test_blocks)
2665	{
2666	  cancel_changes (0);
2667
2668	  if (cond_exec_process_if_block (ce_info, FALSE))
2669	    return TRUE;
2670	}
2671    }
2672
2673  return FALSE;
2674}
2675
2676/* Merge the blocks and mark for local life update.  */
2677
2678static void
2679merge_if_block (struct ce_if_block * ce_info)
2680{
2681  basic_block test_bb = ce_info->test_bb;	/* last test block */
2682  basic_block then_bb = ce_info->then_bb;	/* THEN */
2683  basic_block else_bb = ce_info->else_bb;	/* ELSE or NULL */
2684  basic_block join_bb = ce_info->join_bb;	/* join block */
2685  basic_block combo_bb;
2686
2687  /* All block merging is done into the lower block numbers.  */
2688
2689  combo_bb = test_bb;
2690
2691  /* Merge any basic blocks to handle && and || subtests.  Each of
2692     the blocks is on the fallthru path from the predecessor block.  */
2693  if (ce_info->num_multiple_test_blocks > 0)
2694    {
2695      basic_block bb = test_bb;
2696      basic_block last_test_bb = ce_info->last_test_bb;
2697      basic_block fallthru = block_fallthru (bb);
2698
2699      do
2700	{
2701	  bb = fallthru;
2702	  fallthru = block_fallthru (bb);
2703	  merge_blocks (combo_bb, bb);
2704	  num_true_changes++;
2705	}
2706      while (bb != last_test_bb);
2707    }
2708
2709  /* Merge TEST block into THEN block.  Normally the THEN block won't have a
2710     label, but it might if there were || tests.  That label's count should be
2711     zero, and it normally should be removed.  */
2712
2713  if (then_bb)
2714    {
2715      if (combo_bb->il.rtl->global_live_at_end)
2716	COPY_REG_SET (combo_bb->il.rtl->global_live_at_end,
2717		      then_bb->il.rtl->global_live_at_end);
2718      merge_blocks (combo_bb, then_bb);
2719      num_true_changes++;
2720    }
2721
2722  /* The ELSE block, if it existed, had a label.  That label count
2723     will almost always be zero, but odd things can happen when labels
2724     get their addresses taken.  */
2725  if (else_bb)
2726    {
2727      merge_blocks (combo_bb, else_bb);
2728      num_true_changes++;
2729    }
2730
2731  /* If there was no join block reported, that means it was not adjacent
2732     to the others, and so we cannot merge them.  */
2733
2734  if (! join_bb)
2735    {
2736      rtx last = BB_END (combo_bb);
2737
2738      /* The outgoing edge for the current COMBO block should already
2739	 be correct.  Verify this.  */
2740      if (EDGE_COUNT (combo_bb->succs) == 0)
2741	gcc_assert (find_reg_note (last, REG_NORETURN, NULL)
2742		    || (NONJUMP_INSN_P (last)
2743			&& GET_CODE (PATTERN (last)) == TRAP_IF
2744			&& (TRAP_CONDITION (PATTERN (last))
2745			    == const_true_rtx)));
2746
2747      else
2748      /* There should still be something at the end of the THEN or ELSE
2749         blocks taking us to our final destination.  */
2750	gcc_assert (JUMP_P (last)
2751		    || (EDGE_SUCC (combo_bb, 0)->dest == EXIT_BLOCK_PTR
2752			&& CALL_P (last)
2753			&& SIBLING_CALL_P (last))
2754		    || ((EDGE_SUCC (combo_bb, 0)->flags & EDGE_EH)
2755			&& can_throw_internal (last)));
2756    }
2757
2758  /* The JOIN block may have had quite a number of other predecessors too.
2759     Since we've already merged the TEST, THEN and ELSE blocks, we should
2760     have only one remaining edge from our if-then-else diamond.  If there
2761     is more than one remaining edge, it must come from elsewhere.  There
2762     may be zero incoming edges if the THEN block didn't actually join
2763     back up (as with a call to a non-return function).  */
2764  else if (EDGE_COUNT (join_bb->preds) < 2
2765	   && join_bb != EXIT_BLOCK_PTR)
2766    {
2767      /* We can merge the JOIN.  */
2768      if (combo_bb->il.rtl->global_live_at_end)
2769	COPY_REG_SET (combo_bb->il.rtl->global_live_at_end,
2770		      join_bb->il.rtl->global_live_at_end);
2771
2772      merge_blocks (combo_bb, join_bb);
2773      num_true_changes++;
2774    }
2775  else
2776    {
2777      /* We cannot merge the JOIN.  */
2778
2779      /* The outgoing edge for the current COMBO block should already
2780	 be correct.  Verify this.  */
2781      gcc_assert (single_succ_p (combo_bb)
2782		  && single_succ (combo_bb) == join_bb);
2783
2784      /* Remove the jump and cruft from the end of the COMBO block.  */
2785      if (join_bb != EXIT_BLOCK_PTR)
2786	tidy_fallthru_edge (single_succ_edge (combo_bb));
2787    }
2788
2789  num_updated_if_blocks++;
2790}
2791
2792/* Find a block ending in a simple IF condition and try to transform it
2793   in some way.  When converting a multi-block condition, put the new code
2794   in the first such block and delete the rest.  Return a pointer to this
2795   first block if some transformation was done.  Return NULL otherwise.  */
2796
2797static basic_block
2798find_if_header (basic_block test_bb, int pass)
2799{
2800  ce_if_block_t ce_info;
2801  edge then_edge;
2802  edge else_edge;
2803
2804  /* The kind of block we're looking for has exactly two successors.  */
2805  if (EDGE_COUNT (test_bb->succs) != 2)
2806    return NULL;
2807
2808  then_edge = EDGE_SUCC (test_bb, 0);
2809  else_edge = EDGE_SUCC (test_bb, 1);
2810
2811  /* Neither edge should be abnormal.  */
2812  if ((then_edge->flags & EDGE_COMPLEX)
2813      || (else_edge->flags & EDGE_COMPLEX))
2814    return NULL;
2815
2816  /* Nor exit the loop.  */
2817  if ((then_edge->flags & EDGE_LOOP_EXIT)
2818      || (else_edge->flags & EDGE_LOOP_EXIT))
2819    return NULL;
2820
2821  /* The THEN edge is canonically the one that falls through.  */
2822  if (then_edge->flags & EDGE_FALLTHRU)
2823    ;
2824  else if (else_edge->flags & EDGE_FALLTHRU)
2825    {
2826      edge e = else_edge;
2827      else_edge = then_edge;
2828      then_edge = e;
2829    }
2830  else
2831    /* Otherwise this must be a multiway branch of some sort.  */
2832    return NULL;
2833
2834  memset (&ce_info, '\0', sizeof (ce_info));
2835  ce_info.test_bb = test_bb;
2836  ce_info.then_bb = then_edge->dest;
2837  ce_info.else_bb = else_edge->dest;
2838  ce_info.pass = pass;
2839
2840#ifdef IFCVT_INIT_EXTRA_FIELDS
2841  IFCVT_INIT_EXTRA_FIELDS (&ce_info);
2842#endif
2843
2844  if (find_if_block (&ce_info))
2845    goto success;
2846
2847  if (HAVE_trap && HAVE_conditional_trap
2848      && find_cond_trap (test_bb, then_edge, else_edge))
2849    goto success;
2850
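  /* The dead-or-predicable cases below consult post-dominator information,
     so only try them when fast queries are available; on conditional
     execution targets they are deferred until after reload.  */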
2851  if (dom_computed[CDI_POST_DOMINATORS] >= DOM_NO_FAST_QUERY
2852      && (! HAVE_conditional_execution || reload_completed))
2853    {
2854      if (find_if_case_1 (test_bb, then_edge, else_edge))
2855	goto success;
2856      if (find_if_case_2 (test_bb, then_edge, else_edge))
2857	goto success;
2858    }
2859
2860  return NULL;
2861
2862 success:
2863  if (dump_file)
2864    fprintf (dump_file, "Conversion succeeded on pass %d.\n", pass);
2865  return ce_info.test_bb;
2866}
2867
2868/* Check whether a block has two edges, one of which falls through to the next
2869   block, and the other jumps to a specific block, so that we can tell if the
2870   block is part of an && test or an || test.  Return either -1 or the number
2871   of non-note, non-jump, non-USE/CLOBBER insns in the block.  */
2872
2873static int
2874block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb)
2875{
2876  edge cur_edge;
2877  int fallthru_p = FALSE;
2878  int jump_p = FALSE;
2879  rtx insn;
2880  rtx end;
2881  int n_insns = 0;
2882  edge_iterator ei;
2883
2884  if (!cur_bb || !target_bb)
2885    return -1;
2886
2887  /* If no edges, obviously it doesn't jump or fallthru.  */
2888  if (EDGE_COUNT (cur_bb->succs) == 0)
2889    return FALSE;
2890
2891  FOR_EACH_EDGE (cur_edge, ei, cur_bb->succs)
2892    {
2893      if (cur_edge->flags & EDGE_COMPLEX)
2894	/* Anything complex isn't what we want.  */
2895	return -1;
2896
2897      else if (cur_edge->flags & EDGE_FALLTHRU)
2898	fallthru_p = TRUE;
2899
2900      else if (cur_edge->dest == target_bb)
2901	jump_p = TRUE;
2902
2903      else
2904	return -1;
2905    }
2906
2907  if ((jump_p & fallthru_p) == 0)
2908    return -1;
2909
2910  /* Don't allow calls in the block, since this is used to group && and ||
2911     together for conditional execution support.  ??? we should support
2912     conditional execution across calls for IA-64 some day, but
2913     for now it makes the code simpler.  */
2914  end = BB_END (cur_bb);
2915  insn = BB_HEAD (cur_bb);
2916
2917  while (insn != NULL_RTX)
2918    {
2919      if (CALL_P (insn))
2920	return -1;
2921
2922      if (INSN_P (insn)
2923	  && !JUMP_P (insn)
2924	  && GET_CODE (PATTERN (insn)) != USE
2925	  && GET_CODE (PATTERN (insn)) != CLOBBER)
2926	n_insns++;
2927
2928      if (insn == end)
2929	break;
2930
2931      insn = NEXT_INSN (insn);
2932    }
2933
2934  return n_insns;
2935}
2936
2937/* Determine if a given basic block heads a simple IF-THEN or IF-THEN-ELSE
2938   block.  If so, we'll try to convert the insns to not require the branch.
2939   Return TRUE if we were successful at converting the block.  */
2940
2941static int
2942find_if_block (struct ce_if_block * ce_info)
2943{
2944  basic_block test_bb = ce_info->test_bb;
2945  basic_block then_bb = ce_info->then_bb;
2946  basic_block else_bb = ce_info->else_bb;
2947  basic_block join_bb = NULL_BLOCK;
2948  edge cur_edge;
2949  basic_block next;
2950  edge_iterator ei;
2951
2952  ce_info->last_test_bb = test_bb;
2953
2954  /* Discover if any fall through predecessors of the current test basic block
2955     were && tests (which jump to the else block) or || tests (which jump to
2956     the then block).  */
2957  if (HAVE_conditional_execution && reload_completed
2958      && single_pred_p (test_bb)
2959      && single_pred_edge (test_bb)->flags == EDGE_FALLTHRU)
2960    {
2961      basic_block bb = single_pred (test_bb);
2962      basic_block target_bb;
2963      int max_insns = MAX_CONDITIONAL_EXECUTE;
2964      int n_insns;
2965
2966      /* Determine if the preceding block is an && or || block.  */
2967      if ((n_insns = block_jumps_and_fallthru_p (bb, else_bb)) >= 0)
2968	{
2969	  ce_info->and_and_p = TRUE;
2970	  target_bb = else_bb;
2971	}
2972      else if ((n_insns = block_jumps_and_fallthru_p (bb, then_bb)) >= 0)
2973	{
2974	  ce_info->and_and_p = FALSE;
2975	  target_bb = then_bb;
2976	}
2977      else
2978	target_bb = NULL_BLOCK;
2979
2980      if (target_bb && n_insns <= max_insns)
2981	{
2982	  int total_insns = 0;
2983	  int blocks = 0;
2984
2985	  ce_info->last_test_bb = test_bb;
2986
2987	  /* Found at least one && or || block, look for more.  */
2988	  do
2989	    {
2990	      ce_info->test_bb = test_bb = bb;
2991	      total_insns += n_insns;
2992	      blocks++;
2993
2994	      if (!single_pred_p (bb))
2995		break;
2996
2997	      bb = single_pred (bb);
2998	      n_insns = block_jumps_and_fallthru_p (bb, target_bb);
2999	    }
3000	  while (n_insns >= 0 && (total_insns + n_insns) <= max_insns);
3001
3002	  ce_info->num_multiple_test_blocks = blocks;
3003	  ce_info->num_multiple_test_insns = total_insns;
3004
3005	  if (ce_info->and_and_p)
3006	    ce_info->num_and_and_blocks = blocks;
3007	  else
3008	    ce_info->num_or_or_blocks = blocks;
3009	}
3010    }
3011
3012  /* The THEN block of an IF-THEN combo must have exactly one predecessor,
3013     other than any || blocks which jump to the THEN block.  */
3014  if ((EDGE_COUNT (then_bb->preds) - ce_info->num_or_or_blocks) != 1)
3015    return FALSE;
3016
3017  /* The THEN and ELSE blocks cannot have complex incoming edges.  */
3018  FOR_EACH_EDGE (cur_edge, ei, then_bb->preds)
3019    {
3020      if (cur_edge->flags & EDGE_COMPLEX)
3021	return FALSE;
3022    }
3023
3024  FOR_EACH_EDGE (cur_edge, ei, else_bb->preds)
3025    {
3026      if (cur_edge->flags & EDGE_COMPLEX)
3027	return FALSE;
3028    }
3029
3030  /* The THEN block of an IF-THEN combo must have zero or one successors.  */
3031  if (EDGE_COUNT (then_bb->succs) > 0
3032      && (!single_succ_p (then_bb)
3033          || (single_succ_edge (then_bb)->flags & EDGE_COMPLEX)
3034	  || (flow2_completed && tablejump_p (BB_END (then_bb), NULL, NULL))))
3035    return FALSE;
3036
3037  /* If the THEN block has no successors, conditional execution can still
3038     make a conditional call.  Don't do this unless the ELSE block has
3039     only one incoming edge -- the CFG manipulation is too ugly otherwise.
3040     Check for the last insn of the THEN block being an indirect jump, which
3041     is listed as not having any successors, but confuses the rest of the CE
3042     code processing.  ??? we should fix this in the future.  */
3043  if (EDGE_COUNT (then_bb->succs) == 0)
3044    {
3045      if (single_pred_p (else_bb))
3046	{
3047	  rtx last_insn = BB_END (then_bb);
3048
3049	  while (last_insn
3050		 && NOTE_P (last_insn)
3051		 && last_insn != BB_HEAD (then_bb))
3052	    last_insn = PREV_INSN (last_insn);
3053
3054	  if (last_insn
3055	      && JUMP_P (last_insn)
3056	      && ! simplejump_p (last_insn))
3057	    return FALSE;
3058
3059	  join_bb = else_bb;
3060	  else_bb = NULL_BLOCK;
3061	}
3062      else
3063	return FALSE;
3064    }
3065
3066  /* If the THEN block's successor is the other edge out of the TEST block,
3067     then we have an IF-THEN combo without an ELSE.  */
3068  else if (single_succ (then_bb) == else_bb)
3069    {
3070      join_bb = else_bb;
3071      else_bb = NULL_BLOCK;
3072    }
3073
3074  /* If the THEN and ELSE blocks meet in a subsequent block, and the ELSE
3075     has exactly one predecessor and one successor, and the outgoing edge
3076     is not complex, then we have an IF-THEN-ELSE combo.  */
3077  else if (single_succ_p (else_bb)
3078	   && single_succ (then_bb) == single_succ (else_bb)
3079	   && single_pred_p (else_bb)
3080	   && ! (single_succ_edge (else_bb)->flags & EDGE_COMPLEX)
3081	   && ! (flow2_completed && tablejump_p (BB_END (else_bb), NULL, NULL)))
3082    join_bb = single_succ (else_bb);
3083
3084  /* Otherwise it is not an IF-THEN or IF-THEN-ELSE combination.  */
3085  else
3086    return FALSE;
3087
3088  num_possible_if_blocks++;
3089
3090  if (dump_file)
3091    {
3092      fprintf (dump_file,
3093	       "\nIF-THEN%s block found, pass %d, start block %d "
3094	       "[insn %d], then %d [%d]",
3095	       (else_bb) ? "-ELSE" : "",
3096	       ce_info->pass,
3097	       test_bb->index,
3098	       BB_HEAD (test_bb) ? (int)INSN_UID (BB_HEAD (test_bb)) : -1,
3099	       then_bb->index,
3100	       BB_HEAD (then_bb) ? (int)INSN_UID (BB_HEAD (then_bb)) : -1);
3101
3102      if (else_bb)
3103	fprintf (dump_file, ", else %d [%d]",
3104		 else_bb->index,
3105		 BB_HEAD (else_bb) ? (int)INSN_UID (BB_HEAD (else_bb)) : -1);
3106
3107      fprintf (dump_file, ", join %d [%d]",
3108	       join_bb->index,
3109	       BB_HEAD (join_bb) ? (int)INSN_UID (BB_HEAD (join_bb)) : -1);
3110
3111      if (ce_info->num_multiple_test_blocks > 0)
3112	fprintf (dump_file, ", %d %s block%s last test %d [%d]",
3113		 ce_info->num_multiple_test_blocks,
3114		 (ce_info->and_and_p) ? "&&" : "||",
3115		 (ce_info->num_multiple_test_blocks == 1) ? "" : "s",
3116		 ce_info->last_test_bb->index,
3117		 ((BB_HEAD (ce_info->last_test_bb))
3118		  ? (int)INSN_UID (BB_HEAD (ce_info->last_test_bb))
3119		  : -1));
3120
3121      fputc ('\n', dump_file);
3122    }
3123
3124  /* Make sure the IF, THEN, and ELSE blocks are adjacent.  Actually, we get the
3125     first condition for free, since we've already asserted that there's a
3126     fallthru edge from IF to THEN.  Likewise for the && and || blocks, since
3127     we checked the FALLTHRU flag, those are already adjacent to the last IF
3128     block.  */
3129  /* ??? As an enhancement, move the ELSE block.  Have to deal with
3130     BLOCK notes, if by no other means than backing out the merge if they
3131     exist.  Sticky enough I don't want to think about it now.  */
3132  next = then_bb;
3133  if (else_bb && (next = next->next_bb) != else_bb)
3134    return FALSE;
3135  if ((next = next->next_bb) != join_bb && join_bb != EXIT_BLOCK_PTR)
3136    {
3137      if (else_bb)
3138	join_bb = NULL;
3139      else
3140	return FALSE;
3141    }
3142
3143  /* Do the real work.  */
3144  ce_info->else_bb = else_bb;
3145  ce_info->join_bb = join_bb;
3146
3147  return process_if_block (ce_info);
3148}
3149
3150/* Convert a branch over a trap, or a branch
3151   to a trap, into a conditional trap.  */
3152
3153static int
3154find_cond_trap (basic_block test_bb, edge then_edge, edge else_edge)
3155{
3156  basic_block then_bb = then_edge->dest;
3157  basic_block else_bb = else_edge->dest;
3158  basic_block other_bb, trap_bb;
3159  rtx trap, jump, cond, cond_earliest, seq;
3160  enum rtx_code code;
3161
3162  /* Locate the block with the trap instruction.  */
3163  /* ??? While we look for no successors, we really ought to allow
3164     EH successors.  Need to fix merge_if_block for that to work.  */
3165  if ((trap = block_has_only_trap (then_bb)) != NULL)
3166    trap_bb = then_bb, other_bb = else_bb;
3167  else if ((trap = block_has_only_trap (else_bb)) != NULL)
3168    trap_bb = else_bb, other_bb = then_bb;
3169  else
3170    return FALSE;
3171
3172  if (dump_file)
3173    {
3174      fprintf (dump_file, "\nTRAP-IF block found, start %d, trap %d\n",
3175	       test_bb->index, trap_bb->index);
3176    }
3177
3178  /* If this is not a standard conditional jump, we can't parse it.  */
3179  jump = BB_END (test_bb);
3180  cond = noce_get_condition (jump, &cond_earliest);
3181  if (! cond)
3182    return FALSE;
3183
3184  /* If the conditional jump is more than just a conditional jump, then
3185     we cannot do if-conversion on this block.  */
3186  if (! onlyjump_p (jump))
3187    return FALSE;
3188
3189  /* We must be comparing objects whose modes imply the size.  */
3190  if (GET_MODE (XEXP (cond, 0)) == BLKmode)
3191    return FALSE;
3192
3193  /* Reverse the comparison code, if necessary.  */
3194  code = GET_CODE (cond);
3195  if (then_bb == trap_bb)
3196    {
3197      code = reversed_comparison_code (cond, jump);
3198      if (code == UNKNOWN)
3199	return FALSE;
3200    }
3201
3202  /* Attempt to generate the conditional trap.  */
3203  seq = gen_cond_trap (code, XEXP (cond, 0),
3204		       XEXP (cond, 1),
3205		       TRAP_CODE (PATTERN (trap)));
3206  if (seq == NULL)
3207    return FALSE;
3208
3209  num_true_changes++;
3210
3211  /* Emit the new insns before cond_earliest.  */
3212  emit_insn_before_setloc (seq, cond_earliest, INSN_LOCATOR (trap));
3213
3214  /* Delete the trap block if possible.  */
3215  remove_edge (trap_bb == then_bb ? then_edge : else_edge);
3216  if (EDGE_COUNT (trap_bb->preds) == 0)
3217    delete_basic_block (trap_bb);
3218
3219  /* If the non-trap block and the test are now adjacent, merge them.
3220     Otherwise we must insert a direct branch.  */
3221  if (test_bb->next_bb == other_bb)
3222    {
3223      struct ce_if_block new_ce_info;
3224      delete_insn (jump);
3225      memset (&new_ce_info, '\0', sizeof (new_ce_info));
3226      new_ce_info.test_bb = test_bb;
3227      new_ce_info.then_bb = NULL;
3228      new_ce_info.else_bb = NULL;
3229      new_ce_info.join_bb = other_bb;
3230      merge_if_block (&new_ce_info);
3231    }
3232  else
3233    {
3234      rtx lab, newjump;
3235
3236      lab = JUMP_LABEL (jump);
3237      newjump = emit_jump_insn_after (gen_jump (lab), jump);
3238      LABEL_NUSES (lab) += 1;
3239      JUMP_LABEL (newjump) = lab;
3240      emit_barrier_after (newjump);
3241
3242      delete_insn (jump);
3243    }
3244
3245  return TRUE;
3246}
3247
3248/* Subroutine of find_cond_trap: if BB contains only a trap insn,
3249   return it.  */
3250
3251static rtx
3252block_has_only_trap (basic_block bb)
3253{
3254  rtx trap;
3255
3256  /* We're not the exit block.  */
3257  if (bb == EXIT_BLOCK_PTR)
3258    return NULL_RTX;
3259
3260  /* The block must have no successors.  */
3261  if (EDGE_COUNT (bb->succs) > 0)
3262    return NULL_RTX;
3263
3264  /* The only instruction in the block must be the trap.  */
3265  trap = first_active_insn (bb);
3266  if (! (trap == BB_END (bb)
3267	 && GET_CODE (PATTERN (trap)) == TRAP_IF
3268         && TRAP_CONDITION (PATTERN (trap)) == const_true_rtx))
3269    return NULL_RTX;
3270
3271  return trap;
3272}
3273
3274/* Look for IF-THEN-ELSE cases in which one of THEN or ELSE is
3275   transformable, but not necessarily the other.  There need be no
3276   JOIN block.
3277
3278   Return TRUE if we were successful at converting the block.
3279
3280   Cases we'd like to look at:
3281
3282   (1)
3283	if (test) goto over; // x not live
3284	x = a;
3285	goto label;
3286	over:
3287
3288   becomes
3289
3290	x = a;
3291	if (! test) goto label;
3292
3293   (2)
3294	if (test) goto E; // x not live
3295	x = big();
3296	goto L;
3297	E:
3298	x = b;
3299	goto M;
3300
3301   becomes
3302
3303	x = b;
3304	if (test) goto M;
3305	x = big();
3306	goto L;
3307
3308   (3) // This one's really only interesting for targets that can do
3309       // multiway branching, e.g. IA-64 BBB bundles.  For other targets
3310       // it results in multiple branches on a cache line, which often
3311       // does not sit well with predictors.
3312
3313	if (test1) goto E; // predicted not taken
3314	x = a;
3315	if (test2) goto F;
3316	...
3317	E:
3318	x = b;
3319	J:
3320
3321   becomes
3322
3323	x = a;
3324	if (test1) goto E;
3325	if (test2) goto F;
3326
3327   Notes:
3328
3329   (A) Don't do (2) if the branch is predicted against the block we're
3330   eliminating.  Do it anyway if we can eliminate a branch; this requires
3331   that the sole successor of the eliminated block postdominate the other
3332   side of the if.
3333
3334   (B) With CE, on (3) we can steal from both sides of the if, creating
3335
3336	if (test1) x = a;
3337	if (!test1) x = b;
3338	if (test1) goto J;
3339	if (test2) goto F;
3340	...
3341	J:
3342
3343   Again, this is most useful if J postdominates.
3344
3345   (C) CE substitutes for helpful life information.
3346
3347   (D) These heuristics need a lot of work.  */
3348
3349/* Tests for case 1 above.  */
3350
3351static int
3352find_if_case_1 (basic_block test_bb, edge then_edge, edge else_edge)
3353{
3354  basic_block then_bb = then_edge->dest;
3355  basic_block else_bb = else_edge->dest, new_bb;
3356  int then_bb_index;
3357
3358  /* If we are partitioning hot/cold basic blocks, we don't want to
3359     mess up unconditional or indirect jumps that cross between hot
3360     and cold sections.
3361
3362     Basic block partitioning may result in some jumps that appear to
3363     be optimizable (or blocks that appear to be mergeable), but which really
3364     must be left untouched (they are required to make it safely across
3365     partition boundaries).  See  the comments at the top of
3366     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
3367
3368  if ((BB_END (then_bb)
3369       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
3370      || (BB_END (test_bb)
3371	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
3372      || (BB_END (else_bb)
3373	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
3374			    NULL_RTX)))
3375    return FALSE;
3376
3377  /* THEN has one successor.  */
3378  if (!single_succ_p (then_bb))
3379    return FALSE;
3380
3381  /* THEN does not fall through, but is not strange either.  */
3382  if (single_succ_edge (then_bb)->flags & (EDGE_COMPLEX | EDGE_FALLTHRU))
3383    return FALSE;
3384
3385  /* THEN has one predecessor.  */
3386  if (!single_pred_p (then_bb))
3387    return FALSE;
3388
3389  /* THEN must do something.  */
3390  if (forwarder_block_p (then_bb))
3391    return FALSE;
3392
3393  num_possible_if_blocks++;
3394  if (dump_file)
3395    fprintf (dump_file,
3396	     "\nIF-CASE-1 found, start %d, then %d\n",
3397	     test_bb->index, then_bb->index);
3398
3399  /* THEN is small.  */
3400  if (! cheap_bb_rtx_cost_p (then_bb, COSTS_N_INSNS (BRANCH_COST)))
3401    return FALSE;
3402
3403  /* Registers set are dead, or are predicable.  */
3404  if (! dead_or_predicable (test_bb, then_bb, else_bb,
3405			    single_succ (then_bb), 1))
3406    return FALSE;
3407
3408  /* Conversion went ok, including moving the insns and fixing up the
3409     jump.  Adjust the CFG to match.  */
3410
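  /* TEST_BB now holds the code hoisted from THEN_BB and falls through to
     ELSE_BB, so its live-at-end set is the union of THEN_BB's live-at-end
     and ELSE_BB's live-at-start.  */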
3411  bitmap_ior (test_bb->il.rtl->global_live_at_end,
3412	      else_bb->il.rtl->global_live_at_start,
3413	      then_bb->il.rtl->global_live_at_end);
3414
3415
3416  /* We can avoid creating a new basic block if then_bb is immediately
3417     followed by else_bb, i.e. deleting then_bb allows test_bb to fall
3418     thru to else_bb.  */
3419
3420  if (then_bb->next_bb == else_bb
3421      && then_bb->prev_bb == test_bb
3422      && else_bb != EXIT_BLOCK_PTR)
3423    {
3424      redirect_edge_succ (FALLTHRU_EDGE (test_bb), else_bb);
3425      new_bb = 0;
3426    }
3427  else
3428    new_bb = redirect_edge_and_branch_force (FALLTHRU_EDGE (test_bb),
3429                                             else_bb);
3430
3431  then_bb_index = then_bb->index;
3432  delete_basic_block (then_bb);
3433
3434  /* Make the rest of the code believe that the newly created block is
3435     the THEN_BB block we removed.  */
3436  if (new_bb)
3437    {
3438      new_bb->index = then_bb_index;
3439      SET_BASIC_BLOCK (then_bb_index, new_bb);
3440      /* Since the fallthru edge was redirected from test_bb to new_bb,
3441         we need to ensure that new_bb is in the same partition as
3442         test_bb (you cannot fall through across section boundaries).  */
3443      BB_COPY_PARTITION (new_bb, test_bb);
3444    }
3445  /* We've possibly created a jump to the next insn; cleanup_cfg will
3446     solve that later.  */
3447
3448  num_true_changes++;
3449  num_updated_if_blocks++;
3450
3451  return TRUE;
3452}
3453
3454/* Test for case 2 above.  */
3455
3456static int
3457find_if_case_2 (basic_block test_bb, edge then_edge, edge else_edge)
3458{
3459  basic_block then_bb = then_edge->dest;
3460  basic_block else_bb = else_edge->dest;
3461  edge else_succ;
3462  rtx note;
3463
3464  /* If we are partitioning hot/cold basic blocks, we don't want to
3465     mess up unconditional or indirect jumps that cross between hot
3466     and cold sections.
3467
3468     Basic block partitioning may result in some jumps that appear to
3469     be optimizable (or blocks that appear to be mergeable), but which really
3470     must be left untouched (they are required to make it safely across
3471     partition boundaries).  See  the comments at the top of
3472     bb-reorder.c:partition_hot_cold_basic_blocks for complete details.  */
3473
3474  if ((BB_END (then_bb)
3475       && find_reg_note (BB_END (then_bb), REG_CROSSING_JUMP, NULL_RTX))
3476      || (BB_END (test_bb)
3477	  && find_reg_note (BB_END (test_bb), REG_CROSSING_JUMP, NULL_RTX))
3478      || (BB_END (else_bb)
3479	  && find_reg_note (BB_END (else_bb), REG_CROSSING_JUMP,
3480			    NULL_RTX)))
3481    return FALSE;
3482
3483  /* ELSE has one successor.  */
3484  if (!single_succ_p (else_bb))
3485    return FALSE;
3486  else
3487    else_succ = single_succ_edge (else_bb);
3488
3489  /* ELSE outgoing edge is not complex.  */
3490  if (else_succ->flags & EDGE_COMPLEX)
3491    return FALSE;
3492
3493  /* ELSE has one predecessor.  */
3494  if (!single_pred_p (else_bb))
3495    return FALSE;
3496
3497  /* THEN is not EXIT.  */
3498  if (then_bb->index < NUM_FIXED_BLOCKS)
3499    return FALSE;
3500
3501  /* ELSE is predicted or SUCC(ELSE) postdominates THEN.  */
3502  note = find_reg_note (BB_END (test_bb), REG_BR_PROB, NULL_RTX);
3503  if (note && INTVAL (XEXP (note, 0)) >= REG_BR_PROB_BASE / 2)
3504    ;
3505  else if (else_succ->dest->index < NUM_FIXED_BLOCKS
3506	   || dominated_by_p (CDI_POST_DOMINATORS, then_bb,
3507			      else_succ->dest))
3508    ;
3509  else
3510    return FALSE;
3511
3512  num_possible_if_blocks++;
3513  if (dump_file)
3514    fprintf (dump_file,
3515	     "\nIF-CASE-2 found, start %d, else %d\n",
3516	     test_bb->index, else_bb->index);
3517
3518  /* ELSE is small.  */
3519  if (! cheap_bb_rtx_cost_p (else_bb, COSTS_N_INSNS (BRANCH_COST)))
3520    return FALSE;
3521
3522  /* Registers set are dead, or are predicable.  */
3523  if (! dead_or_predicable (test_bb, else_bb, then_bb, else_succ->dest, 0))
3524    return FALSE;
3525
3526  /* Conversion went ok, including moving the insns and fixing up the
3527     jump.  Adjust the CFG to match.  */
3528
3529  bitmap_ior (test_bb->il.rtl->global_live_at_end,
3530	      then_bb->il.rtl->global_live_at_start,
3531	      else_bb->il.rtl->global_live_at_end);
3532
3533  delete_basic_block (else_bb);
3534
3535  num_true_changes++;
3536  num_updated_if_blocks++;
3537
3538  /* ??? We may now fallthru from one of THEN's successors into a join
3539     block.  Rerun cleanup_cfg?  Examine things manually?  Wait?  */
3540
3541  return TRUE;
3542}
3543
3544/* A subroutine of dead_or_predicable called through for_each_rtx.
3545   Return 1 if a memory is found.  */
3546
3547static int
3548find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
3549{
3550  return MEM_P (*px);
3551}
3552
3553/* Used by the code above to perform the actual rtl transformations.
3554   Return TRUE if successful.
3555
3556   TEST_BB is the block containing the conditional branch.  MERGE_BB
3557   is the block containing the code to manipulate.  NEW_DEST is the
3558   label TEST_BB should be branching to after the conversion.
3559   REVERSEP is true if the sense of the branch should be reversed.  */
3560
3561static int
3562dead_or_predicable (basic_block test_bb, basic_block merge_bb,
3563		    basic_block other_bb, basic_block new_dest, int reversep)
3564{
3565  rtx head, end, jump, earliest = NULL_RTX, old_dest, new_label = NULL_RTX;
3566
3567  jump = BB_END (test_bb);
3568
3569  /* Find the extent of the real code in the merge block.  */
3570  head = BB_HEAD (merge_bb);
3571  end = BB_END (merge_bb);
3572
3573  /* If merge_bb ends with a tablejump, predicating/moving insns
3574     into test_bb and then deleting merge_bb will result in the jumptable
3575     that follows merge_bb being removed along with merge_bb and then we
3576     get an unresolved reference to the jumptable.  */
3577  if (tablejump_p (end, NULL, NULL))
3578    return FALSE;
3579
3580  if (LABEL_P (head))
3581    head = NEXT_INSN (head);
3582  if (NOTE_P (head))
3583    {
3584      if (head == end)
3585	{
3586	  head = end = NULL_RTX;
3587	  goto no_body;
3588	}
3589      head = NEXT_INSN (head);
3590    }
3591
3592  if (JUMP_P (end))
3593    {
3594      if (head == end)
3595	{
3596	  head = end = NULL_RTX;
3597	  goto no_body;
3598	}
3599      end = PREV_INSN (end);
3600    }
3601
3602  /* Disable handling dead code by conditional execution if the machine needs
3603     to do anything funny with the tests, etc.  */
3604#ifndef IFCVT_MODIFY_TESTS
3605  if (HAVE_conditional_execution)
3606    {
3607      /* In the conditional execution case, we have things easy.  We know
3608	 the condition is reversible.  We don't have to check life info
3609	 because we're going to conditionally execute the code anyway.
3610	 All that's left is making sure the insns involved can actually
3611	 be predicated.  */
3612
3613      rtx cond, prob_val;
3614
3615      cond = cond_exec_get_condition (jump);
3616      if (! cond)
3617	return FALSE;
3618
3619      prob_val = find_reg_note (jump, REG_BR_PROB, NULL_RTX);
3620      if (prob_val)
3621	prob_val = XEXP (prob_val, 0);
3622
3623      if (reversep)
3624	{
3625	  enum rtx_code rev = reversed_comparison_code (cond, jump);
3626	  if (rev == UNKNOWN)
3627	    return FALSE;
3628	  cond = gen_rtx_fmt_ee (rev, GET_MODE (cond), XEXP (cond, 0),
3629			         XEXP (cond, 1));
3630	  if (prob_val)
3631	    prob_val = GEN_INT (REG_BR_PROB_BASE - INTVAL (prob_val));
3632	}
3633
3634      if (! cond_exec_process_insns ((ce_if_block_t *)0, head, end, cond,
3635				     prob_val, 0))
3636	goto cancel;
3637
3638      earliest = jump;
3639    }
3640  else
3641#endif
3642    {
3643      /* In the non-conditional execution case, we have to verify that there
3644	 are no trapping operations, no calls, no references to memory, and
3645	 that any registers modified are dead at the branch site.  */
3646
3647      rtx insn, cond, prev;
3648      regset merge_set, tmp, test_live, test_set;
3649      struct propagate_block_info *pbi;
3650      unsigned i, fail = 0;
3651      bitmap_iterator bi;
3652
3653      /* Check for no calls or trapping operations.  */
3654      for (insn = head; ; insn = NEXT_INSN (insn))
3655	{
3656	  if (CALL_P (insn))
3657	    return FALSE;
3658	  if (INSN_P (insn))
3659	    {
3660	      if (may_trap_p (PATTERN (insn)))
3661		return FALSE;
3662
3663	      /* ??? Even non-trapping memories such as stack frame
3664		 references must be avoided.  For stores, we collect
3665		 no lifetime info; for reads, we'd have to assert
3666		 true_dependence false against every store in the
3667		 TEST range.  */
3668	      if (for_each_rtx (&PATTERN (insn), find_memory, NULL))
3669		return FALSE;
3670	    }
3671	  if (insn == end)
3672	    break;
3673	}
3674
3675      if (! any_condjump_p (jump))
3676	return FALSE;
3677
3678      /* Find the extent of the conditional.  */
3679      cond = noce_get_condition (jump, &earliest);
3680      if (! cond)
3681	return FALSE;
3682
3683      /* Collect:
3684	   MERGE_SET = set of registers set in MERGE_BB
3685	   TEST_LIVE = set of registers live at EARLIEST
3686	   TEST_SET  = set of registers set between EARLIEST and the
3687		       end of the block.  */
3688
3689      tmp = ALLOC_REG_SET (&reg_obstack);
3690      merge_set = ALLOC_REG_SET (&reg_obstack);
3691      test_live = ALLOC_REG_SET (&reg_obstack);
3692      test_set = ALLOC_REG_SET (&reg_obstack);
3693
3694      /* ??? bb->local_set is only valid during calculate_global_regs_live,
3695	 so we must recompute usage for MERGE_BB.  Not so bad, I suppose,
3696         since we've already asserted that MERGE_BB is small.  */
3697      /* If we allocated new pseudos (e.g. in the conditional move
3698	 expander called from noce_emit_cmove), we must resize the
3699	 array first.  */
3700      if (max_regno < max_reg_num ())
3701	{
3702	  max_regno = max_reg_num ();
3703	  allocate_reg_info (max_regno, FALSE, FALSE);
3704	}
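      /* Scan MERGE_BB, collecting the registers it sets into MERGE_SET.  */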
3705      propagate_block (merge_bb, tmp, merge_set, merge_set, 0);
3706
3707      /* For small register class machines, don't lengthen lifetimes of
3708	 hard registers before reload.  */
3709      if (SMALL_REGISTER_CLASSES && ! reload_completed)
3710	{
3711          EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
3712	    {
3713	      if (i < FIRST_PSEUDO_REGISTER
3714		  && ! fixed_regs[i]
3715		  && ! global_regs[i])
3716		fail = 1;
3717	    }
3718	}
3719
      /* For TEST, we're interested in a range of insns, not a whole block.
	 Moreover, we're interested in the registers live on entry to
	 OTHER_BB.  */
3722
3723      COPY_REG_SET (test_live, other_bb->il.rtl->global_live_at_start);
3724      pbi = init_propagate_block_info (test_bb, test_live, test_set, test_set,
3725				       0);
3726
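      /* Walk backwards from the jump to EARLIEST, updating TEST_LIVE as
	 we go and collecting the registers set into TEST_SET.  */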
3727      for (insn = jump; ; insn = prev)
3728	{
3729	  prev = propagate_one_insn (pbi, insn);
3730	  if (insn == earliest)
3731	    break;
3732	}
3733
3734      free_propagate_block_info (pbi);
3735
3736      /* We can perform the transformation if
3737	   MERGE_SET & (TEST_SET | TEST_LIVE)
3738	 and
3739	   TEST_SET & merge_bb->il.rtl->global_live_at_start
3740	 are empty.  */
3741
3742      if (bitmap_intersect_p (test_set, merge_set)
3743	  || bitmap_intersect_p (test_live, merge_set)
3744	  || bitmap_intersect_p (test_set,
3745	    			 merge_bb->il.rtl->global_live_at_start))
3746	fail = 1;
3747
3748      FREE_REG_SET (tmp);
3749      FREE_REG_SET (merge_set);
3750      FREE_REG_SET (test_live);
3751      FREE_REG_SET (test_set);
3752
3753      if (fail)
3754	return FALSE;
3755    }
3756
3757 no_body:
  /* We don't want to use the normal invert_jump or redirect_jump because
     we don't want delete_insn to be called.  Also, we want to do our own
     change group management.  */
3761
3762  old_dest = JUMP_LABEL (jump);
3763  if (other_bb != new_dest)
3764    {
3765      new_label = block_label (new_dest);
3766      if (reversep
3767	  ? ! invert_jump_1 (jump, new_label)
3768	  : ! redirect_jump_1 (jump, new_label))
3769	goto cancel;
3770    }
3771
3772  if (! apply_change_group ())
3773    return FALSE;
3774
3775  if (other_bb != new_dest)
3776    {
3777      redirect_jump_2 (jump, old_dest, new_label, -1, reversep);
3778
3779      redirect_edge_succ (BRANCH_EDGE (test_bb), new_dest);
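      /* If the sense of the branch was reversed, the branch and fallthru
	 edges have exchanged roles, so swap their counts and probabilities
	 and refresh the REG_BR_PROB note.  */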
3780      if (reversep)
3781	{
3782	  gcov_type count, probability;
3783	  count = BRANCH_EDGE (test_bb)->count;
3784	  BRANCH_EDGE (test_bb)->count = FALLTHRU_EDGE (test_bb)->count;
3785	  FALLTHRU_EDGE (test_bb)->count = count;
3786	  probability = BRANCH_EDGE (test_bb)->probability;
3787	  BRANCH_EDGE (test_bb)->probability
3788	    = FALLTHRU_EDGE (test_bb)->probability;
3789	  FALLTHRU_EDGE (test_bb)->probability = probability;
3790	  update_br_prob_note (test_bb);
3791	}
3792    }
3793
3794  /* Move the insns out of MERGE_BB to before the branch.  */
3795  if (head != NULL)
3796    {
3797      rtx insn;
3798
3799      if (end == BB_END (merge_bb))
3800	BB_END (merge_bb) = PREV_INSN (head);
3801
3802      if (squeeze_notes (&head, &end))
3803	return TRUE;
3804
3805      /* PR 21767: When moving insns above a conditional branch, REG_EQUAL
3806	 notes might become invalid.  */
3807      insn = head;
3808      do
3809	{
3810	  rtx note, set;
3811
3812	  if (! INSN_P (insn))
3813	    continue;
3814	  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
3815	  if (! note)
3816	    continue;
3817	  set = single_set (insn);
3818	  if (!set || !function_invariant_p (SET_SRC (set)))
3819	    remove_note (insn, note);
3820	} while (insn != end && (insn = NEXT_INSN (insn)));
3821
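      /* Splice the body in immediately before the earliest insn of the
	 condition.  */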
3822      reorder_insns (head, end, PREV_INSN (earliest));
3823    }
3824
3825  /* Remove the jump and edge if we can.  */
3826  if (other_bb == new_dest)
3827    {
3828      delete_insn (jump);
3829      remove_edge (BRANCH_EDGE (test_bb));
3830      /* ??? Can't merge blocks here, as then_bb is still in use.
3831	 At minimum, the merge will get done just before bb-reorder.  */
3832    }
3833
3834  return TRUE;
3835
3836 cancel:
3837  cancel_changes (0);
3838  return FALSE;
3839}
3840
3841/* Main entry point for all if-conversion.  */
3842
3843static void
3844if_convert (int x_life_data_ok)
3845{
3846  basic_block bb;
3847  int pass;
3848
3849  num_possible_if_blocks = 0;
3850  num_updated_if_blocks = 0;
3851  num_true_changes = 0;
3852  life_data_ok = (x_life_data_ok != 0);
3853
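  /* Find the natural loops and mark their exit edges for use by the
     conversion heuristics below.  */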
3854  if ((! targetm.cannot_modify_jumps_p ())
3855      && (!flag_reorder_blocks_and_partition || !no_new_pseudos
3856	  || !targetm.have_named_sections))
3857    {
3858      struct loops loops;
3859
3860      flow_loops_find (&loops);
3861      mark_loop_exit_edges (&loops);
3862      flow_loops_free (&loops);
3863      free_dominance_info (CDI_DOMINATORS);
3864    }
3865
3866  /* Compute postdominators if we think we'll use them.  */
3867  if (HAVE_conditional_execution || life_data_ok)
3868    calculate_dominance_info (CDI_POST_DOMINATORS);
3869
3870  if (life_data_ok)
3871    clear_bb_flags ();
3872
3873  /* Go through each of the basic blocks looking for things to convert.  If we
3874     have conditional execution, we make multiple passes to allow us to handle
3875     IF-THEN{-ELSE} blocks within other IF-THEN{-ELSE} blocks.  */
3876  pass = 0;
3877  do
3878    {
3879      cond_exec_changed_p = FALSE;
3880      pass++;
3881
3882#ifdef IFCVT_MULTIPLE_DUMPS
3883      if (dump_file && pass > 1)
3884	fprintf (dump_file, "\n\n========== Pass %d ==========\n", pass);
3885#endif
3886
3887      FOR_EACH_BB (bb)
3888	{
3889	  basic_block new_bb;
3890	  while ((new_bb = find_if_header (bb, pass)))
3891	    bb = new_bb;
3892	}
3893
3894#ifdef IFCVT_MULTIPLE_DUMPS
3895      if (dump_file && cond_exec_changed_p)
3896	print_rtl_with_bb (dump_file, get_insns ());
3897#endif
3898    }
3899  while (cond_exec_changed_p);
3900
3901#ifdef IFCVT_MULTIPLE_DUMPS
3902  if (dump_file)
3903    fprintf (dump_file, "\n\n========== no more changes\n");
3904#endif
3905
3906  free_dominance_info (CDI_POST_DOMINATORS);
3907
3908  if (dump_file)
3909    fflush (dump_file);
3910
3911  clear_aux_for_blocks ();
3912
3913  /* Rebuild life info for basic blocks that require it.  */
3914  if (num_true_changes && life_data_ok)
3915    {
3916      /* If we allocated new pseudos, we must resize the array for sched1.  */
3917      if (max_regno < max_reg_num ())
3918	{
3919	  max_regno = max_reg_num ();
3920	  allocate_reg_info (max_regno, FALSE, FALSE);
3921	}
3922      update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
3923					PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
3924					| PROP_KILL_DEAD_CODE);
3925    }
3926
3927  /* Write the final stats.  */
3928  if (dump_file && num_possible_if_blocks > 0)
3929    {
3930      fprintf (dump_file,
3931	       "\n%d possible IF blocks searched.\n",
3932	       num_possible_if_blocks);
3933      fprintf (dump_file,
3934	       "%d IF blocks converted.\n",
3935	       num_updated_if_blocks);
3936      fprintf (dump_file,
3937	       "%d true changes made.\n\n\n",
3938	       num_true_changes);
3939    }
3940
3941#ifdef ENABLE_CHECKING
3942  verify_flow_info ();
3943#endif
3944}
3945
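/* Return true if the first if-conversion pass should be run at all.  */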
3946static bool
3947gate_handle_if_conversion (void)
3948{
3949  return (optimize > 0);
3950}
3951
3952/* If-conversion and CFG cleanup.  */
3953static unsigned int
3954rest_of_handle_if_conversion (void)
3955{
3956  if (flag_if_conversion)
3957    {
3958      if (dump_file)
3959        dump_flow_info (dump_file, dump_flags);
3960      cleanup_cfg (CLEANUP_EXPENSIVE);
3961      reg_scan (get_insns (), max_reg_num ());
3962      if_convert (0);
3963    }
3964
3965  timevar_push (TV_JUMP);
3966  cleanup_cfg (CLEANUP_EXPENSIVE);
3967  reg_scan (get_insns (), max_reg_num ());
3968  timevar_pop (TV_JUMP);
3969  return 0;
3970}
3971
3972struct tree_opt_pass pass_rtl_ifcvt =
3973{
3974  "ce1",                                /* name */
3975  gate_handle_if_conversion,            /* gate */
3976  rest_of_handle_if_conversion,         /* execute */
3977  NULL,                                 /* sub */
3978  NULL,                                 /* next */
3979  0,                                    /* static_pass_number */
3980  TV_IFCVT,                             /* tv_id */
3981  0,                                    /* properties_required */
3982  0,                                    /* properties_provided */
3983  0,                                    /* properties_destroyed */
3984  0,                                    /* todo_flags_start */
3985  TODO_dump_func,                       /* todo_flags_finish */
3986  'C'                                   /* letter */
3987};
3988
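/* Return true if a second if-conversion pass should be run after combine.  */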
3989static bool
3990gate_handle_if_after_combine (void)
3991{
3992  return (optimize > 0 && flag_if_conversion);
3993}
3994
3995
3996/* Rerun if-conversion, as combine may have simplified things enough
3997   to now meet sequence length restrictions.  */
3998static unsigned int
3999rest_of_handle_if_after_combine (void)
4000{
4001  no_new_pseudos = 0;
4002  if_convert (1);
4003  no_new_pseudos = 1;
4004  return 0;
4005}
4006
4007struct tree_opt_pass pass_if_after_combine =
4008{
4009  "ce2",                                /* name */
4010  gate_handle_if_after_combine,         /* gate */
4011  rest_of_handle_if_after_combine,      /* execute */
4012  NULL,                                 /* sub */
4013  NULL,                                 /* next */
4014  0,                                    /* static_pass_number */
4015  TV_IFCVT,                             /* tv_id */
4016  0,                                    /* properties_required */
4017  0,                                    /* properties_provided */
4018  0,                                    /* properties_destroyed */
4019  0,                                    /* todo_flags_start */
4020  TODO_dump_func |
4021  TODO_ggc_collect,                     /* todo_flags_finish */
4022  'C'                                   /* letter */
4023};
4024
4025
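/* Return true if the post-reload if-conversion pass should be considered;
   the conversion itself is further gated on flag_if_conversion2.  */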
4026static bool
4027gate_handle_if_after_reload (void)
4028{
4029  return (optimize > 0);
4030}
4031
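/* If-conversion and CFG cleanup after register allocation.  */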
4032static unsigned int
4033rest_of_handle_if_after_reload (void)
4034{
  /* Last attempt to optimize the CFG, as scheduling, peepholing and insn
     splitting may have introduced more crossjumping opportunities.  */
4037  cleanup_cfg (CLEANUP_EXPENSIVE
4038               | CLEANUP_UPDATE_LIFE
4039               | (flag_crossjumping ? CLEANUP_CROSSJUMP : 0));
4040  if (flag_if_conversion2)
4041    if_convert (1);
4042  return 0;
4043}
4044
4045
4046struct tree_opt_pass pass_if_after_reload =
4047{
4048  "ce3",                                /* name */
4049  gate_handle_if_after_reload,          /* gate */
4050  rest_of_handle_if_after_reload,       /* execute */
4051  NULL,                                 /* sub */
4052  NULL,                                 /* next */
4053  0,                                    /* static_pass_number */
4054  TV_IFCVT2,                            /* tv_id */
4055  0,                                    /* properties_required */
4056  0,                                    /* properties_provided */
4057  0,                                    /* properties_destroyed */
4058  0,                                    /* todo_flags_start */
4059  TODO_dump_func |
4060  TODO_ggc_collect,                     /* todo_flags_finish */
4061  'E'                                   /* letter */
4062};
4063
4064
4065