1/* Lower complex number operations to scalar operations.
2   Copyright (C) 2004-2015 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
8Free Software Foundation; either version 3, or (at your option) any
9later version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING3.  If not see
18<http://www.gnu.org/licenses/>.  */
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tm.h"
24#include "hash-set.h"
25#include "machmode.h"
26#include "vec.h"
27#include "double-int.h"
28#include "input.h"
29#include "alias.h"
30#include "symtab.h"
31#include "wide-int.h"
32#include "inchash.h"
33#include "real.h"
34#include "tree.h"
35#include "fold-const.h"
36#include "stor-layout.h"
37#include "flags.h"
38#include "predict.h"
39#include "hard-reg-set.h"
40#include "function.h"
41#include "dominance.h"
42#include "cfg.h"
43#include "basic-block.h"
44#include "tree-ssa-alias.h"
45#include "internal-fn.h"
46#include "tree-eh.h"
47#include "gimple-expr.h"
48#include "is-a.h"
49#include "gimple.h"
50#include "gimplify.h"
51#include "gimple-iterator.h"
52#include "gimplify-me.h"
53#include "gimple-ssa.h"
54#include "tree-cfg.h"
55#include "tree-phinodes.h"
56#include "ssa-iterators.h"
57#include "stringpool.h"
58#include "tree-ssanames.h"
59#include "hashtab.h"
60#include "rtl.h"
61#include "statistics.h"
62#include "fixed-value.h"
63#include "insn-config.h"
64#include "expmed.h"
65#include "dojump.h"
66#include "explow.h"
67#include "calls.h"
68#include "emit-rtl.h"
69#include "varasm.h"
70#include "stmt.h"
71#include "expr.h"
72#include "tree-dfa.h"
73#include "tree-ssa.h"
74#include "tree-iterator.h"
75#include "tree-pass.h"
76#include "tree-ssa-propagate.h"
77#include "tree-hasher.h"
78#include "cfgloop.h"
79
80
/* For each complex ssa name, a lattice value.  We're interested in finding
   out whether a complex number is degenerate in some way, having only real
   or only imaginary parts.  */

enum
{
  UNINITIALIZED = 0,	/* No value has been seen yet.  */
  ONLY_REAL = 1,	/* Imaginary part known to be zero.  */
  ONLY_IMAG = 2,	/* Real part known to be zero.  */
  VARYING = 3		/* Both parts may be nonzero.  Note that
			   VARYING == (ONLY_REAL | ONLY_IMAG), so bitwise
			   IOR implements the lattice meet used by the
			   propagation below.  */
};

/* The type complex_lattice_t holds combinations of the above
   constants.  */
typedef int complex_lattice_t;

/* Pack two lattice values into a single constant, used to switch over
   all (a, b) operand-lattice combinations in the expanders below.  */
#define PAIR(a, b)  ((a) << 2 | (b))


/* Lattice value of each complex SSA name, indexed by SSA_NAME_VERSION.  */
static vec<complex_lattice_t> complex_lattice_values;

/* For each complex variable, a pair of variables for the components exists in
   the hashtable.  */
static int_tree_htab_type *complex_variable_components;

/* For each complex SSA_NAME, a pair of ssa names for the components.  */
static vec<tree> complex_ssa_name_components;
108
109/* Lookup UID in the complex_variable_components hashtable and return the
110   associated tree.  */
111static tree
112cvc_lookup (unsigned int uid)
113{
114  struct int_tree_map in;
115  in.uid = uid;
116  return complex_variable_components->find_with_hash (in, uid).to;
117}
118
119/* Insert the pair UID, TO into the complex_variable_components hashtable.  */
120
121static void
122cvc_insert (unsigned int uid, tree to)
123{
124  int_tree_map h;
125  int_tree_map *loc;
126
127  h.uid = uid;
128  loc = complex_variable_components->find_slot_with_hash (h, uid, INSERT);
129  loc->uid = uid;
130  loc->to = to;
131}
132
133/* Return true if T is not a zero constant.  In the case of real values,
134   we're only interested in +0.0.  */
135
136static int
137some_nonzerop (tree t)
138{
139  int zerop = false;
140
141  /* Operations with real or imaginary part of a complex number zero
142     cannot be treated the same as operations with a real or imaginary
143     operand if we care about the signs of zeros in the result.  */
144  if (TREE_CODE (t) == REAL_CST && !flag_signed_zeros)
145    zerop = REAL_VALUES_IDENTICAL (TREE_REAL_CST (t), dconst0);
146  else if (TREE_CODE (t) == FIXED_CST)
147    zerop = fixed_zerop (t);
148  else if (TREE_CODE (t) == INTEGER_CST)
149    zerop = integer_zerop (t);
150
151  return !zerop;
152}
153
154
155/* Compute a lattice value from the components of a complex type REAL
156   and IMAG.  */
157
158static complex_lattice_t
159find_lattice_value_parts (tree real, tree imag)
160{
161  int r, i;
162  complex_lattice_t ret;
163
164  r = some_nonzerop (real);
165  i = some_nonzerop (imag);
166  ret = r * ONLY_REAL + i * ONLY_IMAG;
167
168  /* ??? On occasion we could do better than mapping 0+0i to real, but we
169     certainly don't want to leave it UNINITIALIZED, which eventually gets
170     mapped to VARYING.  */
171  if (ret == UNINITIALIZED)
172    ret = ONLY_REAL;
173
174  return ret;
175}
176
177
178/* Compute a lattice value from gimple_val T.  */
179
180static complex_lattice_t
181find_lattice_value (tree t)
182{
183  tree real, imag;
184
185  switch (TREE_CODE (t))
186    {
187    case SSA_NAME:
188      return complex_lattice_values[SSA_NAME_VERSION (t)];
189
190    case COMPLEX_CST:
191      real = TREE_REALPART (t);
192      imag = TREE_IMAGPART (t);
193      break;
194
195    default:
196      gcc_unreachable ();
197    }
198
199  return find_lattice_value_parts (real, imag);
200}
201
202/* Determine if LHS is something for which we're interested in seeing
203   simulation results.  */
204
205static bool
206is_complex_reg (tree lhs)
207{
208  return TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE && is_gimple_reg (lhs);
209}
210
211/* Mark the incoming parameters to the function as VARYING.  */
212
213static void
214init_parameter_lattice_values (void)
215{
216  tree parm, ssa_name;
217
218  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
219    if (is_complex_reg (parm)
220	&& (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE)
221      complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING;
222}
223
/* Initialize simulation state for each statement.  Return false if we
   found no statements we want to simulate, and thus there's nothing
   for the entire pass to do.  */

static bool
init_dont_simulate_again (void)
{
  basic_block bb;
  bool saw_a_complex_op = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* PHIs are simulated iff they produce a complex register.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  prop_set_simulate_again (phi,
				   is_complex_reg (gimple_phi_result (phi)));
	}

      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple stmt;
	  tree op0, op1;
	  bool sim_again_p;

	  stmt = gsi_stmt (gsi);
	  op0 = op1 = NULL_TREE;

	  /* Most control-altering statements must be initially
	     simulated, else we won't cover the entire cfg.  */
	  sim_again_p = stmt_ends_bb_p (stmt);

	  /* Collect the operands whose types tell us whether this
	     statement is a complex operation at all.  */
	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_CALL:
	      if (gimple_call_lhs (stmt))
	        sim_again_p = is_complex_reg (gimple_call_lhs (stmt));
	      break;

	    case GIMPLE_ASSIGN:
	      sim_again_p = is_complex_reg (gimple_assign_lhs (stmt));
	      /* For REALPART/IMAGPART look through to the complex
		 operand underneath the part extraction.  */
	      if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
		  || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
		op0 = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
	      else
		op0 = gimple_assign_rhs1 (stmt);
	      if (gimple_num_ops (stmt) > 2)
		op1 = gimple_assign_rhs2 (stmt);
	      break;

	    case GIMPLE_COND:
	      op0 = gimple_cond_lhs (stmt);
	      op1 = gimple_cond_rhs (stmt);
	      break;

	    default:
	      break;
	    }

	  /* Record whether we saw any operation the pass actually needs
	     to lower; if not, the whole pass is skipped.  */
	  if (op0 || op1)
	    switch (gimple_expr_code (stmt))
	      {
	      case EQ_EXPR:
	      case NE_EXPR:
	      case PLUS_EXPR:
	      case MINUS_EXPR:
	      case MULT_EXPR:
	      case TRUNC_DIV_EXPR:
	      case CEIL_DIV_EXPR:
	      case FLOOR_DIV_EXPR:
	      case ROUND_DIV_EXPR:
	      case RDIV_EXPR:
		if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE
		    || TREE_CODE (TREE_TYPE (op1)) == COMPLEX_TYPE)
		  saw_a_complex_op = true;
		break;

	      case NEGATE_EXPR:
	      case CONJ_EXPR:
		if (TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
		  saw_a_complex_op = true;
		break;

	      case REALPART_EXPR:
	      case IMAGPART_EXPR:
		/* The total store transformation performed during
		  gimplification creates such uninitialized loads
		  and we need to lower the statement to be able
		  to fix things up.  */
		if (TREE_CODE (op0) == SSA_NAME
		    && ssa_undefined_value_p (op0))
		  saw_a_complex_op = true;
		break;

	      default:
		break;
	      }

	  prop_set_simulate_again (stmt, sim_again_p);
	}
    }

  return saw_a_complex_op;
}
330
331
/* Evaluate statement STMT against the complex lattice defined above.
   This is the SSA-propagator transfer function: it computes a new
   lattice value for STMT's lhs and reports whether anything changed.  */

static enum ssa_prop_result
complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
		    tree *result_p)
{
  complex_lattice_t new_l, old_l, op1_l, op2_l;
  unsigned int ver;
  tree lhs;

  lhs = gimple_get_lhs (stmt);
  /* Skip anything but GIMPLE_ASSIGN and GIMPLE_CALL with a lhs.  */
  if (!lhs)
    return SSA_PROP_VARYING;

  /* These conditions should be satisfied due to the initial filter
     set up in init_dont_simulate_again.  */
  gcc_assert (TREE_CODE (lhs) == SSA_NAME);
  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);

  *result_p = lhs;
  ver = SSA_NAME_VERSION (lhs);
  old_l = complex_lattice_values[ver];

  switch (gimple_expr_code (stmt))
    {
    /* A copy propagates the operand's lattice value unchanged.  */
    case SSA_NAME:
    case COMPLEX_CST:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    /* COMPLEX_EXPR builds a complex from two scalars; examine them
       directly.  */
    case COMPLEX_EXPR:
      new_l = find_lattice_value_parts (gimple_assign_rhs1 (stmt),
				        gimple_assign_rhs2 (stmt));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* We've set up the lattice values such that IOR neatly
	 models addition.  */
      new_l = op1_l | op2_l;
      break;

    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      op1_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      op2_l = find_lattice_value (gimple_assign_rhs2 (stmt));

      /* Obviously, if either varies, so does the result.  */
      if (op1_l == VARYING || op2_l == VARYING)
	new_l = VARYING;
      /* Don't prematurely promote variables if we've not yet seen
	 their inputs.  */
      else if (op1_l == UNINITIALIZED)
	new_l = op2_l;
      else if (op2_l == UNINITIALIZED)
	new_l = op1_l;
      else
	{
	  /* At this point both numbers have only one component. If the
	     numbers are of opposite kind, the result is imaginary,
	     otherwise the result is real. The add/subtract translates
	     the real/imag from/to 0/1; the ^ performs the comparison.  */
	  new_l = ((op1_l - ONLY_REAL) ^ (op2_l - ONLY_REAL)) + ONLY_REAL;

	  /* Don't allow the lattice value to flip-flop indefinitely.  */
	  new_l |= old_l;
	}
      break;

    /* Negation and conjugation can zero neither a zero real nor a zero
       imaginary part, so they preserve the operand's lattice value.  */
    case NEGATE_EXPR:
    case CONJ_EXPR:
      new_l = find_lattice_value (gimple_assign_rhs1 (stmt));
      break;

    default:
      new_l = VARYING;
      break;
    }

  /* If nothing changed this round, let the propagator know.  */
  if (new_l == old_l)
    return SSA_PROP_NOT_INTERESTING;

  complex_lattice_values[ver] = new_l;
  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
426
427/* Evaluate a PHI node against the complex lattice defined above.  */
428
429static enum ssa_prop_result
430complex_visit_phi (gphi *phi)
431{
432  complex_lattice_t new_l, old_l;
433  unsigned int ver;
434  tree lhs;
435  int i;
436
437  lhs = gimple_phi_result (phi);
438
439  /* This condition should be satisfied due to the initial filter
440     set up in init_dont_simulate_again.  */
441  gcc_assert (TREE_CODE (TREE_TYPE (lhs)) == COMPLEX_TYPE);
442
443  /* We've set up the lattice values such that IOR neatly models PHI meet.  */
444  new_l = UNINITIALIZED;
445  for (i = gimple_phi_num_args (phi) - 1; i >= 0; --i)
446    new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));
447
448  ver = SSA_NAME_VERSION (lhs);
449  old_l = complex_lattice_values[ver];
450
451  if (new_l == old_l)
452    return SSA_PROP_NOT_INTERESTING;
453
454  complex_lattice_values[ver] = new_l;
455  return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
456}
457
458/* Create one backing variable for a complex component of ORIG.  */
459
460static tree
461create_one_component_var (tree type, tree orig, const char *prefix,
462			  const char *suffix, enum tree_code code)
463{
464  tree r = create_tmp_var (type, prefix);
465
466  DECL_SOURCE_LOCATION (r) = DECL_SOURCE_LOCATION (orig);
467  DECL_ARTIFICIAL (r) = 1;
468
469  if (DECL_NAME (orig) && !DECL_IGNORED_P (orig))
470    {
471      const char *name = IDENTIFIER_POINTER (DECL_NAME (orig));
472
473      DECL_NAME (r) = get_identifier (ACONCAT ((name, suffix, NULL)));
474
475      SET_DECL_DEBUG_EXPR (r, build1 (code, type, orig));
476      DECL_HAS_DEBUG_EXPR_P (r) = 1;
477      DECL_IGNORED_P (r) = 0;
478      TREE_NO_WARNING (r) = TREE_NO_WARNING (orig);
479    }
480  else
481    {
482      DECL_IGNORED_P (r) = 1;
483      TREE_NO_WARNING (r) = 1;
484    }
485
486  return r;
487}
488
489/* Retrieve a value for a complex component of VAR.  */
490
491static tree
492get_component_var (tree var, bool imag_p)
493{
494  size_t decl_index = DECL_UID (var) * 2 + imag_p;
495  tree ret = cvc_lookup (decl_index);
496
497  if (ret == NULL)
498    {
499      ret = create_one_component_var (TREE_TYPE (TREE_TYPE (var)), var,
500				      imag_p ? "CI" : "CR",
501				      imag_p ? "$imag" : "$real",
502				      imag_p ? IMAGPART_EXPR : REALPART_EXPR);
503      cvc_insert (decl_index, ret);
504    }
505
506  return ret;
507}
508
/* Retrieve a value for a complex component of SSA_NAME.  Returns either
   a zero constant (when the lattice proves the component is zero) or a
   cached/new SSA name for the component.  */

static tree
get_component_ssa_name (tree ssa_name, bool imag_p)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree ret;

  /* If the lattice says the requested component is identically zero,
     just return the appropriately-typed zero constant.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    {
      tree inner_type = TREE_TYPE (TREE_TYPE (ssa_name));
      if (SCALAR_FLOAT_TYPE_P (inner_type))
	return build_real (inner_type, dconst0);
      else
	return build_int_cst (inner_type, 0);
    }

  /* Components are cached in pairs: real at 2*version, imag at
     2*version + 1.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  ret = complex_ssa_name_components[ssa_name_index];
  if (ret == NULL)
    {
      /* make_ssa_name accepts either a backing variable or a bare
	 type (for anonymous SSA names).  */
      if (SSA_NAME_VAR (ssa_name))
	ret = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
      else
	ret = TREE_TYPE (TREE_TYPE (ssa_name));
      ret = make_ssa_name (ret);

      /* Copy some properties from the original.  In particular, whether it
	 is used in an abnormal phi, and whether it's uninitialized.  */
      SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ret)
	= SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name);
      if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
	  && TREE_CODE (SSA_NAME_VAR (ssa_name)) == VAR_DECL)
	{
	  SSA_NAME_DEF_STMT (ret) = SSA_NAME_DEF_STMT (ssa_name);
	  set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret);
	}

      complex_ssa_name_components[ssa_name_index] = ret;
    }

  return ret;
}
553
/* Set a value for a complex component of SSA_NAME, return a
   gimple_seq of stuff that needs doing.  Returns NULL when no code
   needs to be emitted (the value was absorbed into the tables).  */

static gimple_seq
set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
{
  complex_lattice_t lattice = find_lattice_value (ssa_name);
  size_t ssa_name_index;
  tree comp;
  gimple last;
  gimple_seq list;

  /* We know the value must be zero, else there's a bug in our lattice
     analysis.  But the value may well be a variable known to contain
     zero.  We should be safe ignoring it.  */
  if (lattice == (imag_p ? ONLY_REAL : ONLY_IMAG))
    return NULL;

  /* If we've already assigned an SSA_NAME to this component, then this
     means that our walk of the basic blocks found a use before the set.
     This is fine.  Now we should create an initialization for the value
     we created earlier.  */
  ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
  comp = complex_ssa_name_components[ssa_name_index];
  if (comp)
    /* Fall through to the assignment-building code below.  */
    ;

  /* If we've nothing assigned, and the value we're given is already stable,
     then install that as the value for this SSA_NAME.  This preemptively
     copy-propagates the value, which avoids unnecessary memory allocation.  */
  else if (is_gimple_min_invariant (value)
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }
  else if (TREE_CODE (value) == SSA_NAME
	   && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      /* Replace an anonymous base value with the variable from cvc_lookup.
	 This should result in better debug info.  */
      if (SSA_NAME_VAR (ssa_name)
	  && (!SSA_NAME_VAR (value) || DECL_IGNORED_P (SSA_NAME_VAR (value)))
	  && !DECL_IGNORED_P (SSA_NAME_VAR (ssa_name)))
	{
	  comp = get_component_var (SSA_NAME_VAR (ssa_name), imag_p);
	  replace_ssa_name_symbol (value, comp);
	}

      complex_ssa_name_components[ssa_name_index] = value;
      return NULL;
    }

  /* Finally, we need to stabilize the result by installing the value into
     a new ssa name.  */
  else
    comp = get_component_ssa_name (ssa_name, imag_p);

  /* Do all the work to assign VALUE to COMP.  */
  list = NULL;
  value = force_gimple_operand (value, &list, false, NULL);
  last =  gimple_build_assign (comp, value);
  gimple_seq_add_stmt (&list, last);
  gcc_assert (SSA_NAME_DEF_STMT (comp) == last);

  return list;
}
621
622/* Extract the real or imaginary part of a complex variable or constant.
623   Make sure that it's a proper gimple_val and gimplify it if not.
624   Emit any new code before gsi.  */
625
626static tree
627extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
628		   bool gimple_p)
629{
630  switch (TREE_CODE (t))
631    {
632    case COMPLEX_CST:
633      return imagpart_p ? TREE_IMAGPART (t) : TREE_REALPART (t);
634
635    case COMPLEX_EXPR:
636      gcc_unreachable ();
637
638    case VAR_DECL:
639    case RESULT_DECL:
640    case PARM_DECL:
641    case COMPONENT_REF:
642    case ARRAY_REF:
643    case VIEW_CONVERT_EXPR:
644    case MEM_REF:
645      {
646	tree inner_type = TREE_TYPE (TREE_TYPE (t));
647
648	t = build1 ((imagpart_p ? IMAGPART_EXPR : REALPART_EXPR),
649		    inner_type, unshare_expr (t));
650
651	if (gimple_p)
652	  t = force_gimple_operand_gsi (gsi, t, true, NULL, true,
653                                        GSI_SAME_STMT);
654
655	return t;
656      }
657
658    case SSA_NAME:
659      return get_component_ssa_name (t, imagpart_p);
660
661    default:
662      gcc_unreachable ();
663    }
664}
665
666/* Update the complex components of the ssa name on the lhs of STMT.  */
667
668static void
669update_complex_components (gimple_stmt_iterator *gsi, gimple stmt, tree r,
670			   tree i)
671{
672  tree lhs;
673  gimple_seq list;
674
675  lhs = gimple_get_lhs (stmt);
676
677  list = set_component_ssa_name (lhs, false, r);
678  if (list)
679    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
680
681  list = set_component_ssa_name (lhs, true, i);
682  if (list)
683    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
684}
685
686static void
687update_complex_components_on_edge (edge e, tree lhs, tree r, tree i)
688{
689  gimple_seq list;
690
691  list = set_component_ssa_name (lhs, false, r);
692  if (list)
693    gsi_insert_seq_on_edge (e, list);
694
695  list = set_component_ssa_name (lhs, true, i);
696  if (list)
697    gsi_insert_seq_on_edge (e, list);
698}
699
700
701/* Update an assignment to a complex variable in place.  */
702
703static void
704update_complex_assignment (gimple_stmt_iterator *gsi, tree r, tree i)
705{
706  gimple stmt;
707
708  gimple_assign_set_rhs_with_ops (gsi, COMPLEX_EXPR, r, i);
709  stmt = gsi_stmt (*gsi);
710  update_stmt (stmt);
711  if (maybe_clean_eh_stmt (stmt))
712    gimple_purge_dead_eh_edges (gimple_bb (stmt));
713
714  if (gimple_in_ssa_p (cfun))
715    update_complex_components (gsi, gsi_stmt (*gsi), r, i);
716}
717
718
719/* Generate code at the entry point of the function to initialize the
720   component variables for a complex parameter.  */
721
722static void
723update_parameter_components (void)
724{
725  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
726  tree parm;
727
728  for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
729    {
730      tree type = TREE_TYPE (parm);
731      tree ssa_name, r, i;
732
733      if (TREE_CODE (type) != COMPLEX_TYPE || !is_gimple_reg (parm))
734	continue;
735
736      type = TREE_TYPE (type);
737      ssa_name = ssa_default_def (cfun, parm);
738      if (!ssa_name)
739	continue;
740
741      r = build1 (REALPART_EXPR, type, ssa_name);
742      i = build1 (IMAGPART_EXPR, type, ssa_name);
743      update_complex_components_on_edge (entry_edge, ssa_name, r, i);
744    }
745}
746
/* Generate code to set the component variables of a complex variable
   to match the PHI statements in block BB.  For every complex PHI we
   build up to two scalar PHIs (real and imaginary) whose arguments are
   the extracted components of the original arguments.  */

static void
update_phi_components (basic_block bb)
{
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();

      if (is_complex_reg (gimple_phi_result (phi)))
	{
	  tree lr, li;
	  gimple pr = NULL, pi = NULL;
	  unsigned int i, n;

	  /* get_component_ssa_name may return a constant when the
	     lattice proved the component zero; only an SSA name needs
	     a scalar PHI.  */
	  lr = get_component_ssa_name (gimple_phi_result (phi), false);
	  if (TREE_CODE (lr) == SSA_NAME)
	    pr = create_phi_node (lr, bb);

	  li = get_component_ssa_name (gimple_phi_result (phi), true);
	  if (TREE_CODE (li) == SSA_NAME)
	    pi = create_phi_node (li, bb);

	  /* Fill in the scalar PHI arguments edge by edge.  */
	  for (i = 0, n = gimple_phi_num_args (phi); i < n; ++i)
	    {
	      tree comp, arg = gimple_phi_arg_def (phi, i);
	      if (pr)
		{
		  comp = extract_component (NULL, arg, false, false);
		  SET_PHI_ARG_DEF (pr, i, comp);
		}
	      if (pi)
		{
		  comp = extract_component (NULL, arg, true, false);
		  SET_PHI_ARG_DEF (pi, i, comp);
		}
	    }
	}
    }
}
790
/* Expand a complex move to scalars.  GSI points at an assignment or a
   call whose lhs or rhs is of complex TYPE; handle register lhs, memory
   lhs, and control-altering definitions separately.  */

static void
expand_complex_move (gimple_stmt_iterator *gsi, tree type)
{
  tree inner_type = TREE_TYPE (type);
  tree r, i, lhs, rhs;
  gimple stmt = gsi_stmt (*gsi);

  if (is_gimple_assign (stmt))
    {
      lhs = gimple_assign_lhs (stmt);
      if (gimple_num_ops (stmt) == 2)
	rhs = gimple_assign_rhs1 (stmt);
      else
	rhs = NULL_TREE;
    }
  else if (is_gimple_call (stmt))
    {
      lhs = gimple_call_lhs (stmt);
      rhs = NULL_TREE;
    }
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) == SSA_NAME)
    {
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  /* The value is not assigned on the exception edges, so we need not
	     concern ourselves there.  We do need to update on the fallthru
	     edge.  Find it.  */
	  e = find_fallthru_edge (gsi_bb (*gsi)->succs);
	  if (!e)
	    gcc_unreachable ();

	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components_on_edge (e, lhs, r, i);
	}
      else if (is_gimple_call (stmt)
	       || gimple_has_side_effects (stmt)
	       || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
	{
	  /* Leave the statement in place; just record the component
	     extractions for later uses.  */
	  r = build1 (REALPART_EXPR, inner_type, lhs);
	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
	  update_complex_components (gsi, stmt, r, i);
	}
      else
	{
	  /* A plain register copy: decompose the rhs and rewrite the
	     statement as COMPLEX_EXPR <r, i>.  */
	  if (gimple_assign_rhs_code (stmt) != COMPLEX_EXPR)
	    {
	      r = extract_component (gsi, rhs, 0, true);
	      i = extract_component (gsi, rhs, 1, true);
	    }
	  else
	    {
	      r = gimple_assign_rhs1 (stmt);
	      i = gimple_assign_rhs2 (stmt);
	    }
	  update_complex_assignment (gsi, r, i);
	}
    }
  else if (rhs && TREE_CODE (rhs) == SSA_NAME && !TREE_SIDE_EFFECTS (lhs))
    {
      /* Store of an SSA name into memory: split into two scalar
	 stores of the real and imaginary parts.  */
      tree x;
      gimple t;
      location_t loc;

      loc = gimple_location (stmt);
      r = extract_component (gsi, rhs, 0, false);
      i = extract_component (gsi, rhs, 1, false);

      x = build1 (REALPART_EXPR, inner_type, unshare_expr (lhs));
      t = gimple_build_assign (x, r);
      gimple_set_location (t, loc);
      gsi_insert_before (gsi, t, GSI_SAME_STMT);

      if (stmt == gsi_stmt (*gsi))
	{
	  /* Reuse the original statement for the imaginary store.  */
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  gimple_assign_set_lhs (stmt, x);
	  gimple_assign_set_rhs1 (stmt, i);
	}
      else
	{
	  /* The iterator moved (a GIMPLE_RETURN was split); emit the
	     imaginary store separately and fix up the return value.  */
	  x = build1 (IMAGPART_EXPR, inner_type, unshare_expr (lhs));
	  t = gimple_build_assign (x, i);
	  gimple_set_location (t, loc);
	  gsi_insert_before (gsi, t, GSI_SAME_STMT);

	  stmt = gsi_stmt (*gsi);
	  gcc_assert (gimple_code (stmt) == GIMPLE_RETURN);
	  gimple_return_set_retval (as_a <greturn *> (stmt), lhs);
	}

      update_stmt (stmt);
    }
}
892
/* Expand complex addition to scalars:
	a + b = (ar + br) + i(ai + bi)
	a - b = (ar - br) + i(ai - bi)
   AL and BL are the lattice values of the operands; degenerate cases
   avoid emitting arithmetic for components known to be zero.  */

static void
expand_complex_addition (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code,
			 complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_REAL, ONLY_IMAG):
      rr = ar;
      /* Subtraction must still negate b's imaginary part.  */
      if (code == MINUS_EXPR)
	ri = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, bi);
      else
	ri = bi;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      if (code == MINUS_EXPR)
	rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ar, br);
      else
	rr = br;
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = ar;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (ONLY_REAL, VARYING):
      if (code == MINUS_EXPR)
	goto general;
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = bi;
      break;

    case PAIR (ONLY_IMAG, VARYING):
      if (code == MINUS_EXPR)
	goto general;
      rr = br;
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    case PAIR (VARYING, VARYING):
    general:
      /* Fully general case: operate on both components.  */
      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
      ri = gimplify_build2 (gsi, code, inner_type, ai, bi);
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}
970
/* Expand a complex multiplication or division to a libcall to the c99
   compliant routines (__mulXc3 / __divXc3), replacing the statement
   at GSI in place.  CODE is MULT_EXPR or RDIV_EXPR.  */

static void
expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
			tree br, tree bi, enum tree_code code)
{
  machine_mode mode;
  enum built_in_function bcode;
  tree fn, type, lhs;
  gimple old_stmt;
  gcall *stmt;

  old_stmt = gsi_stmt (*gsi);
  lhs = gimple_assign_lhs (old_stmt);
  type = TREE_TYPE (lhs);

  /* The libcalls exist only for complex float modes.  */
  mode = TYPE_MODE (type);
  gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);

  /* Select the mode-specific builtin by offset from the first
     complex-float mode.  */
  if (code == MULT_EXPR)
    bcode = ((enum built_in_function)
	     (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else if (code == RDIV_EXPR)
    bcode = ((enum built_in_function)
	     (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
  else
    gcc_unreachable ();
  fn = builtin_decl_explicit (bcode);

  stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
  gimple_call_set_lhs (stmt, lhs);
  update_stmt (stmt);
  gsi_replace (gsi, stmt, false);

  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
    gimple_purge_dead_eh_edges (gsi_bb (*gsi));

  if (gimple_in_ssa_p (cfun))
    {
      /* Record part extractions of the call result as the components.  */
      type = TREE_TYPE (type);
      update_complex_components (gsi, stmt,
				 build1 (REALPART_EXPR, type, lhs),
				 build1 (IMAGPART_EXPR, type, lhs));
      SSA_NAME_DEF_STMT (lhs) = stmt;
    }
}
1018
/* Expand complex multiplication to scalars:
	a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
   AL and BL are the operand lattice values; degenerate operands avoid
   emitting multiplications against known-zero components.  */

static void
expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
			       tree ar, tree ai, tree br, tree bi,
			       complex_lattice_t al, complex_lattice_t bl)
{
  tree rr, ri;

  /* Multiplication commutes; canonicalize so that al >= bl to halve
     the number of PAIR cases below.  */
  if (al < bl)
    {
      complex_lattice_t tl;
      rr = ar, ar = br, br = rr;
      ri = ai, ai = bi, bi = ri;
      tl = al, al = bl, bl = tl;
    }

  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = ai;
      break;

    case PAIR (ONLY_IMAG, ONLY_REAL):
      rr = ar;
      /* Multiplying by i itself needs no arithmetic at all.  */
      if (TREE_CODE (ai) == REAL_CST
	  && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst1))
	ri = br;
      else
	ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (ONLY_IMAG, ONLY_IMAG):
      /* (i*ai)*(i*bi) = -(ai*bi): purely real.  */
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = ar;
      break;

    case PAIR (VARYING, ONLY_REAL):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
      break;

    case PAIR (VARYING, ONLY_IMAG):
      rr = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
      rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, rr);
      ri = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
      break;

    case PAIR (VARYING, VARYING):
      /* -fcx-limited-range off (method 2): punt to the C99-correct
	 libcall, which handles NaN/Inf cases.  */
      if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
	{
	  expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
	  return;
	}
      else
	{
	  tree t1, t2, t3, t4;

	  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
	  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
	  t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);

	  /* Avoid expanding redundant multiplication for the common
	     case of squaring a complex number.  */
	  if (ar == br && ai == bi)
	    t4 = t3;
	  else
	    t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);

	  rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
	  ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
	}
      break;

    default:
      gcc_unreachable ();
    }

  update_complex_assignment (gsi, rr, ri);
}
1103
1104/* Keep this algorithm in sync with fold-const.c:const_binop().
1105
1106   Expand complex division to scalars, straightforward algorithm.
1107	a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1108	    t = br*br + bi*bi
1109*/
1110
1111static void
1112expand_complex_div_straight (gimple_stmt_iterator *gsi, tree inner_type,
1113			     tree ar, tree ai, tree br, tree bi,
1114			     enum tree_code code)
1115{
1116  tree rr, ri, div, t1, t2, t3;
1117
1118  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, br);
1119  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, bi);
1120  div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
1121
1122  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
1123  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
1124  t3 = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, t2);
1125  rr = gimplify_build2 (gsi, code, inner_type, t3, div);
1126
1127  t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
1128  t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
1129  t3 = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
1130  ri = gimplify_build2 (gsi, code, inner_type, t3, div);
1131
1132  update_complex_assignment (gsi, rr, ri);
1133}
1134
/* Keep this algorithm in sync with fold-const.c:const_binop().

   Expand complex division to scalars, modified algorithm to minimize
   overflow with wide input ranges.  This is Smith's algorithm: it
   divides through by whichever of br/bi has the larger magnitude, so
   the intermediate products stay in range.  When the magnitude
   comparison cannot be folded at compile time, a runtime branch is
   emitted with one arm per case.  */

static void
expand_complex_div_wide (gimple_stmt_iterator *gsi, tree inner_type,
			 tree ar, tree ai, tree br, tree bi,
			 enum tree_code code)
{
  tree rr, ri, ratio, div, t1, t2, tr, ti, compare;
  basic_block bb_cond, bb_true, bb_false, bb_join;
  gimple stmt;

  /* Examine |br| < |bi|, and branch.  */
  t1 = gimplify_build1 (gsi, ABS_EXPR, inner_type, br);
  t2 = gimplify_build1 (gsi, ABS_EXPR, inner_type, bi);
  compare = fold_build2_loc (gimple_location (gsi_stmt (*gsi)),
			     LT_EXPR, boolean_type_node, t1, t2);
  STRIP_NOPS (compare);

  bb_cond = bb_true = bb_false = bb_join = NULL;
  rr = ri = tr = ti = NULL;
  /* If the comparison did not fold to a constant, materialize it and
     build the conditional control flow: COND block ending in a
     GIMPLE_COND, TRUE and FALSE arms, and a JOIN block.  */
  if (TREE_CODE (compare) != INTEGER_CST)
    {
      edge e;
      gimple stmt;
      tree cond, tmp;

      /* Evaluate the comparison into a (possibly SSA) temporary so the
	 GIMPLE_COND below can test it.  */
      tmp = create_tmp_var (boolean_type_node);
      stmt = gimple_build_assign (tmp, compare);
      if (gimple_in_ssa_p (cfun))
	{
	  tmp = make_ssa_name (tmp, stmt);
	  gimple_assign_set_lhs (stmt, tmp);
	}

      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      cond = fold_build2_loc (gimple_location (stmt),
			  EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
      stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      /* Split the original block, and create the TRUE and FALSE blocks.  */
      e = split_block (gsi_bb (*gsi), stmt);
      bb_cond = e->src;
      bb_join = e->dest;
      bb_true = create_empty_bb (bb_cond);
      bb_false = create_empty_bb (bb_true);

      /* Wire the blocks together.  */
      e->flags = EDGE_TRUE_VALUE;
      redirect_edge_succ (e, bb_true);
      make_edge (bb_cond, bb_false, EDGE_FALSE_VALUE);
      make_edge (bb_true, bb_join, EDGE_FALLTHRU);
      make_edge (bb_false, bb_join, EDGE_FALLTHRU);
      add_bb_to_loop (bb_true, bb_cond->loop_father);
      add_bb_to_loop (bb_false, bb_cond->loop_father);

      /* Update dominance info.  Note that bb_join's data was
         updated by split_block.  */
      if (dom_info_available_p (CDI_DOMINATORS))
        {
          set_immediate_dominator (CDI_DOMINATORS, bb_true, bb_cond);
          set_immediate_dominator (CDI_DOMINATORS, bb_false, bb_cond);
        }

      /* Temporaries that both arms assign and the join block reads.  */
      rr = create_tmp_reg (inner_type);
      ri = create_tmp_reg (inner_type);
    }

  /* In the TRUE branch, we compute
      ratio = br/bi;
      div = (br * ratio) + bi;
      tr = (ar * ratio) + ai;
      ti = (ai * ratio) - ar;
      tr = tr / div;
      ti = ti / div;  */
  if (bb_true || integer_nonzerop (compare))
    {
      if (bb_true)
	{
	  /* Park the iterator on a nop in the TRUE arm so the builds
	     below insert there; the nop is removed once done.  */
	  *gsi = gsi_last_bb (bb_true);
	  gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
	}

      ratio = gimplify_build2 (gsi, code, inner_type, br, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, br, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, bi);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ai);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, ar);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

     if (bb_true)
       {
	 stmt = gimple_build_assign (rr, tr);
	 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	 stmt = gimple_build_assign (ri, ti);
	 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	 gsi_remove (gsi, true);
       }
    }

  /* In the FALSE branch, we compute
      ratio = d/c;
      divisor = (d * ratio) + c;
      tr = (b * ratio) + a;
      ti = b - (a * ratio);
      tr = tr / div;
      ti = ti / div;  */
  if (bb_false || integer_zerop (compare))
    {
      if (bb_false)
	{
	  /* Same nop trick as the TRUE arm above.  */
	  *gsi = gsi_last_bb (bb_false);
	  gsi_insert_after (gsi, gimple_build_nop (), GSI_NEW_STMT);
	}

      ratio = gimplify_build2 (gsi, code, inner_type, bi, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, bi, ratio);
      div = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, br);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, ratio);
      tr = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t1, ar);

      t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, ratio);
      ti = gimplify_build2 (gsi, MINUS_EXPR, inner_type, ai, t1);

      tr = gimplify_build2 (gsi, code, inner_type, tr, div);
      ti = gimplify_build2 (gsi, code, inner_type, ti, div);

     if (bb_false)
       {
	 stmt = gimple_build_assign (rr, tr);
	 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	 stmt = gimple_build_assign (ri, ti);
	 gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	 gsi_remove (gsi, true);
       }
    }

  /* When we branched, the original statement now lives in bb_join and
     rr/ri are the temporaries assigned by both arms; otherwise the
     straight-line results tr/ti are the answer.  */
  if (bb_join)
    *gsi = gsi_start_bb (bb_join);
  else
    rr = tr, ri = ti;

  update_complex_assignment (gsi, rr, ri);
}
1292
1293/* Expand complex division to scalars.  */
1294
1295static void
1296expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type,
1297			 tree ar, tree ai, tree br, tree bi,
1298			 enum tree_code code,
1299			 complex_lattice_t al, complex_lattice_t bl)
1300{
1301  tree rr, ri;
1302
1303  switch (PAIR (al, bl))
1304    {
1305    case PAIR (ONLY_REAL, ONLY_REAL):
1306      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
1307      ri = ai;
1308      break;
1309
1310    case PAIR (ONLY_REAL, ONLY_IMAG):
1311      rr = ai;
1312      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
1313      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
1314      break;
1315
1316    case PAIR (ONLY_IMAG, ONLY_REAL):
1317      rr = ar;
1318      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
1319      break;
1320
1321    case PAIR (ONLY_IMAG, ONLY_IMAG):
1322      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
1323      ri = ar;
1324      break;
1325
1326    case PAIR (VARYING, ONLY_REAL):
1327      rr = gimplify_build2 (gsi, code, inner_type, ar, br);
1328      ri = gimplify_build2 (gsi, code, inner_type, ai, br);
1329      break;
1330
1331    case PAIR (VARYING, ONLY_IMAG):
1332      rr = gimplify_build2 (gsi, code, inner_type, ai, bi);
1333      ri = gimplify_build2 (gsi, code, inner_type, ar, bi);
1334      ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ri);
1335
1336    case PAIR (ONLY_REAL, VARYING):
1337    case PAIR (ONLY_IMAG, VARYING):
1338    case PAIR (VARYING, VARYING):
1339      switch (flag_complex_method)
1340	{
1341	case 0:
1342	  /* straightforward implementation of complex divide acceptable.  */
1343	  expand_complex_div_straight (gsi, inner_type, ar, ai, br, bi, code);
1344	  break;
1345
1346	case 2:
1347	  if (SCALAR_FLOAT_TYPE_P (inner_type))
1348	    {
1349	      expand_complex_libcall (gsi, ar, ai, br, bi, code);
1350	      break;
1351	    }
1352	  /* FALLTHRU */
1353
1354	case 1:
1355	  /* wide ranges of inputs must work for complex divide.  */
1356	  expand_complex_div_wide (gsi, inner_type, ar, ai, br, bi, code);
1357	  break;
1358
1359	default:
1360	  gcc_unreachable ();
1361	}
1362      return;
1363
1364    default:
1365      gcc_unreachable ();
1366    }
1367
1368  update_complex_assignment (gsi, rr, ri);
1369}
1370
1371/* Expand complex negation to scalars:
1372	-a = (-ar) + i(-ai)
1373*/
1374
1375static void
1376expand_complex_negation (gimple_stmt_iterator *gsi, tree inner_type,
1377			 tree ar, tree ai)
1378{
1379  tree rr, ri;
1380
1381  rr = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ar);
1382  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);
1383
1384  update_complex_assignment (gsi, rr, ri);
1385}
1386
1387/* Expand complex conjugate to scalars:
1388	~a = (ar) + i(-ai)
1389*/
1390
1391static void
1392expand_complex_conjugate (gimple_stmt_iterator *gsi, tree inner_type,
1393			  tree ar, tree ai)
1394{
1395  tree ri;
1396
1397  ri = gimplify_build1 (gsi, NEGATE_EXPR, inner_type, ai);
1398
1399  update_complex_assignment (gsi, ar, ri);
1400}
1401
1402/* Expand complex comparison (EQ or NE only).  */
1403
1404static void
1405expand_complex_comparison (gimple_stmt_iterator *gsi, tree ar, tree ai,
1406			   tree br, tree bi, enum tree_code code)
1407{
1408  tree cr, ci, cc, type;
1409  gimple stmt;
1410
1411  cr = gimplify_build2 (gsi, code, boolean_type_node, ar, br);
1412  ci = gimplify_build2 (gsi, code, boolean_type_node, ai, bi);
1413  cc = gimplify_build2 (gsi,
1414			(code == EQ_EXPR ? TRUTH_AND_EXPR : TRUTH_OR_EXPR),
1415			boolean_type_node, cr, ci);
1416
1417  stmt = gsi_stmt (*gsi);
1418
1419  switch (gimple_code (stmt))
1420    {
1421    case GIMPLE_RETURN:
1422      {
1423	greturn *return_stmt = as_a <greturn *> (stmt);
1424	type = TREE_TYPE (gimple_return_retval (return_stmt));
1425	gimple_return_set_retval (return_stmt, fold_convert (type, cc));
1426      }
1427      break;
1428
1429    case GIMPLE_ASSIGN:
1430      type = TREE_TYPE (gimple_assign_lhs (stmt));
1431      gimple_assign_set_rhs_from_tree (gsi, fold_convert (type, cc));
1432      stmt = gsi_stmt (*gsi);
1433      break;
1434
1435    case GIMPLE_COND:
1436      {
1437	gcond *cond_stmt = as_a <gcond *> (stmt);
1438	gimple_cond_set_code (cond_stmt, EQ_EXPR);
1439	gimple_cond_set_lhs (cond_stmt, cc);
1440	gimple_cond_set_rhs (cond_stmt, boolean_true_node);
1441      }
1442      break;
1443
1444    default:
1445      gcc_unreachable ();
1446    }
1447
1448  update_stmt (stmt);
1449}
1450
1451/* Expand inline asm that sets some complex SSA_NAMEs.  */
1452
1453static void
1454expand_complex_asm (gimple_stmt_iterator *gsi)
1455{
1456  gasm *stmt = as_a <gasm *> (gsi_stmt (*gsi));
1457  unsigned int i;
1458
1459  for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1460    {
1461      tree link = gimple_asm_output_op (stmt, i);
1462      tree op = TREE_VALUE (link);
1463      if (TREE_CODE (op) == SSA_NAME
1464	  && TREE_CODE (TREE_TYPE (op)) == COMPLEX_TYPE)
1465	{
1466	  tree type = TREE_TYPE (op);
1467	  tree inner_type = TREE_TYPE (type);
1468	  tree r = build1 (REALPART_EXPR, inner_type, op);
1469	  tree i = build1 (IMAGPART_EXPR, inner_type, op);
1470	  gimple_seq list = set_component_ssa_name (op, false, r);
1471
1472	  if (list)
1473	    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
1474
1475	  list = set_component_ssa_name (op, true, i);
1476	  if (list)
1477	    gsi_insert_seq_after (gsi, list, GSI_CONTINUE_LINKING);
1478	}
1479    }
1480}
1481
/* Process one statement.  If we identify a complex operation, expand it.
   This is the per-statement driver: it filters for the operations this
   pass handles, extracts the scalar components of the operands, looks up
   their lattice values, and dispatches to the matching expander.  */

static void
expand_complex_operations_1 (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree type, inner_type, lhs;
  tree ac, ar, ai, bc, br, bi;
  complex_lattice_t al, bl;
  enum tree_code code;

  if (gimple_code (stmt) == GIMPLE_ASM)
    {
      expand_complex_asm (gsi);
      return;
    }

  /* Only GIMPLE_COND may legitimately lack an lhs here.  */
  lhs = gimple_get_lhs (stmt);
  if (!lhs && gimple_code (stmt) != GIMPLE_COND)
    return;

  type = TREE_TYPE (gimple_op (stmt, 0));
  code = gimple_expr_code (stmt);

  /* Initial filter for operations we handle.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
    case NEGATE_EXPR:
    case CONJ_EXPR:
      if (TREE_CODE (type) != COMPLEX_TYPE)
	return;
      inner_type = TREE_TYPE (type);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      /* Note, both GIMPLE_ASSIGN and GIMPLE_COND may have an EQ_EXPR
	 subcode, so we need to access the operands using gimple_op.  */
      inner_type = TREE_TYPE (gimple_op (stmt, 1));
      if (TREE_CODE (inner_type) != COMPLEX_TYPE)
	return;
      break;

    default:
      {
	tree rhs;

	/* GIMPLE_COND may also fallthru here, but we do not need to
	   do anything with it.  */
	if (gimple_code (stmt) == GIMPLE_COND)
	  return;

	if (TREE_CODE (type) == COMPLEX_TYPE)
	  expand_complex_move (gsi, type);
	else if (is_gimple_assign (stmt)
		 && (gimple_assign_rhs_code (stmt) == REALPART_EXPR
		     || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR)
		 && TREE_CODE (lhs) == SSA_NAME)
	  {
	    /* Rewrite a REALPART/IMAGPART read as a direct use of the
	       tracked scalar component.  */
	    rhs = gimple_assign_rhs1 (stmt);
	    rhs = extract_component (gsi, TREE_OPERAND (rhs, 0),
		                     gimple_assign_rhs_code (stmt)
				       == IMAGPART_EXPR,
				     false);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    stmt = gsi_stmt (*gsi);
	    update_stmt (stmt);
	  }
      }
      return;
    }

  /* Extract the components of the two complex values.  Make sure and
     handle the common case of the same value used twice specially.  */
  if (is_gimple_assign (stmt))
    {
      ac = gimple_assign_rhs1 (stmt);
      bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
    }
  /* GIMPLE_CALL can not get here.  */
  else
    {
      ac = gimple_cond_lhs (stmt);
      bc = gimple_cond_rhs (stmt);
    }

  ar = extract_component (gsi, ac, false, true);
  ai = extract_component (gsi, ac, true, true);

  if (ac == bc)
    br = ar, bi = ai;
  else if (bc)
    {
      br = extract_component (gsi, bc, 0, true);
      bi = extract_component (gsi, bc, 1, true);
    }
  else
    br = bi = NULL_TREE;

  /* Fetch the lattice values for the operands; outside of SSA form the
     propagator did not run, so everything is treated as VARYING.  */
  if (gimple_in_ssa_p (cfun))
    {
      al = find_lattice_value (ac);
      if (al == UNINITIALIZED)
	al = VARYING;

      if (TREE_CODE_CLASS (code) == tcc_unary)
	bl = UNINITIALIZED;
      else if (ac == bc)
	bl = al;
      else
	{
	  bl = find_lattice_value (bc);
	  if (bl == UNINITIALIZED)
	    bl = VARYING;
	}
    }
  else
    al = bl = VARYING;

  /* Dispatch to the operation-specific expander.  */
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
      expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case MULT_EXPR:
      expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
      break;

    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case RDIV_EXPR:
      expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
      break;

    case NEGATE_EXPR:
      expand_complex_negation (gsi, inner_type, ar, ai);
      break;

    case CONJ_EXPR:
      expand_complex_conjugate (gsi, inner_type, ar, ai);
      break;

    case EQ_EXPR:
    case NE_EXPR:
      expand_complex_comparison (gsi, ar, ai, br, bi, code);
      break;

    default:
      gcc_unreachable ();
    }
}
1645
1646
/* Entry point for complex operation lowering during optimization.
   Runs the lattice propagation over the function, then walks every
   basic block expanding complex operations into scalar ones, and
   finally commits edge insertions and releases the pass-local state.
   Returns 0 (no TODO flags beyond pass_data's).  */

static unsigned int
tree_lower_complex (void)
{
  int old_last_basic_block;
  gimple_stmt_iterator gsi;
  basic_block bb;

  /* Nothing to do if the function contains no complex operations.  */
  if (!init_dont_simulate_again ())
    return 0;

  complex_lattice_values.create (num_ssa_names);
  complex_lattice_values.safe_grow_cleared (num_ssa_names);

  init_parameter_lattice_values ();
  ssa_propagate (complex_visit_stmt, complex_visit_phi);

  complex_variable_components = new int_tree_htab_type (10);

  /* Two slots per SSA name: real and imaginary component.  */
  complex_ssa_name_components.create (2 * num_ssa_names);
  complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);

  update_parameter_components ();

  /* ??? Ideally we'd traverse the blocks in breadth-first order.  */
  old_last_basic_block = last_basic_block_for_fn (cfun);
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Skip blocks created by the expansion itself (e.g. by
	 expand_complex_div_wide); they contain only lowered code.  */
      if (bb->index >= old_last_basic_block)
	continue;

      update_phi_components (bb);
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	expand_complex_operations_1 (&gsi);
    }

  gsi_commit_edge_inserts ();

  delete complex_variable_components;
  complex_variable_components = NULL;
  complex_ssa_name_components.release ();
  complex_lattice_values.release ();
  return 0;
}
1692
namespace {

/* Pass descriptor for the optimizing (post-SSA) complex lowering pass.  */
const pass_data pass_data_lower_complex =
{
  GIMPLE_PASS, /* type */
  "cplxlower", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

/* Optimizing variant of the pass; clonable so it can be scheduled more
   than once in the pass pipeline.  */
class pass_lower_complex : public gimple_opt_pass
{
public:
  pass_lower_complex (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_lower_complex (m_ctxt); }
  virtual unsigned int execute (function *) { return tree_lower_complex (); }

}; // class pass_lower_complex

} // anon namespace

/* Factory function used by the pass manager.  */
gimple_opt_pass *
make_pass_lower_complex (gcc::context *ctxt)
{
  return new pass_lower_complex (ctxt);
}
1728
1729
namespace {

/* Pass descriptor for the -O0 fallback lowering pass; requires only a
   CFG, not SSA-based optimization properties.  */
const pass_data pass_data_lower_complex_O0 =
{
  GIMPLE_PASS, /* type */
  "cplxlower0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_cfg, /* properties_required */
  PROP_gimple_lcx, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

/* -O0 variant: runs only when no earlier pass already lowered complex
   operations (see gate below).  */
class pass_lower_complex_O0 : public gimple_opt_pass
{
public:
  pass_lower_complex_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_complex_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      /* With errors, normal optimization passes are not run.  If we don't
	 lower complex operations at all, rtl expansion will abort.  */
      return !(fun->curr_properties & PROP_gimple_lcx);
    }

  virtual unsigned int execute (function *) { return tree_lower_complex (); }

}; // class pass_lower_complex_O0

} // anon namespace

/* Factory function used by the pass manager.  */
gimple_opt_pass *
make_pass_lower_complex_O0 (gcc::context *ctxt)
{
  return new pass_lower_complex_O0 (ctxt);
}
1771