1/* Expands front end tree to back end RTL for GCC
2   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4   2010 Free Software Foundation, Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3.  If not see
20<http://www.gnu.org/licenses/>.  */
21
22/* This file handles the generation of rtl code from tree structure
23   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24   The functions whose names start with `expand_' are called by the
25   expander to generate RTL instructions for various kinds of constructs.  */
26
27#include "config.h"
28#include "system.h"
29#include "coretypes.h"
30#include "tm.h"
31
32#include "rtl.h"
33#include "hard-reg-set.h"
34#include "tree.h"
35#include "tm_p.h"
36#include "flags.h"
37#include "except.h"
38#include "function.h"
39#include "insn-config.h"
40#include "expr.h"
41#include "libfuncs.h"
42#include "recog.h"
43#include "machmode.h"
44#include "toplev.h"
45#include "output.h"
46#include "ggc.h"
47#include "langhooks.h"
48#include "predict.h"
49#include "optabs.h"
50#include "target.h"
51#include "gimple.h"
52#include "regs.h"
53#include "alloc-pool.h"
54#include "pretty-print.h"
55
56/* Functions and data structures for expanding case statements.  */
57
58/* Case label structure, used to hold info on labels within case
59   statements.  We handle "range" labels; for a single-value label
60   as in C, the high and low limits are the same.
61
62   We start with a vector of case nodes sorted in ascending order, and
63   the default label as the last element in the vector.  Before expanding
64   to RTL, we transform this vector into a list linked via the RIGHT
65   fields in the case_node struct.  Nodes with higher case values are
66   later in the list.
67
68   Switch statements can be output in three forms.  A branch table is
69   used if there are more than a few labels and the labels are dense
70   within the range between the smallest and largest case value.  If a
71   branch table is used, no further manipulations are done with the case
72   node chain.
73
74   The alternative to the use of a branch table is to generate a series
75   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
76   and PARENT fields to hold a binary tree.  Initially the tree is
77   totally unbalanced, with everything on the right.  We balance the tree
78   with nodes on the left having lower case values than the parent
79   and nodes on the right having higher values.  We then output the tree
80   in order.
81
82   For very small, suitable switch statements, we can generate a series
83   of simple bit test and branches instead.  */
84
/* One case label (or case range) of a switch statement.  Nodes are
   linked through RIGHT while in list form, and through LEFT/RIGHT/
   PARENT once balanced into a binary tree (see comment above).  */
struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label;
				   equal to LOW for a single-value label */
  tree			code_label; /* Label to jump to when node matches */
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
97
98/* These are used by estimate_case_costs and balance_case_nodes.  */
99
100/* This must be a signed type, and non-ANSI compilers lack signed char.  */
101static short cost_table_[129];
102static int use_cost_table;
103static int cost_table_initialized;
104
105/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
106   is unsigned.  */
107#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
108
109static int n_occurrences (int, const char *);
110static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
111static void expand_nl_goto_receiver (void);
112static bool check_operand_nalternatives (tree, tree);
113static bool check_unique_operand_names (tree, tree, tree);
114static char *resolve_operand_name_1 (char *, tree, tree, tree);
115static void expand_null_return_1 (void);
116static void expand_value_return (rtx);
117static int estimate_case_costs (case_node_ptr);
118static bool lshift_cheap_p (void);
119static int case_bit_test_cmp (const void *, const void *);
120static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
121static void balance_case_nodes (case_node_ptr *, case_node_ptr);
122static int node_has_low_bound (case_node_ptr, tree);
123static int node_has_high_bound (case_node_ptr, tree);
124static int node_is_bounded (case_node_ptr, tree);
125static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
126static struct case_node *add_case_node (struct case_node *, tree,
127                                        tree, tree, tree, alloc_pool);
128
129
130/* Return the rtx-label that corresponds to a LABEL_DECL,
131   creating it if necessary.  */
132
133rtx
134label_rtx (tree label)
135{
136  gcc_assert (TREE_CODE (label) == LABEL_DECL);
137
138  if (!DECL_RTL_SET_P (label))
139    {
140      rtx r = gen_label_rtx ();
141      SET_DECL_RTL (label, r);
142      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
143	LABEL_PRESERVE_P (r) = 1;
144    }
145
146  return DECL_RTL (label);
147}
148
149/* As above, but also put it on the forced-reference list of the
150   function that contains it.  */
151rtx
152force_label_rtx (tree label)
153{
154  rtx ref = label_rtx (label);
155  tree function = decl_function_context (label);
156
157  gcc_assert (function);
158
159  forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref, forced_labels);
160  return ref;
161}
162
163/* Add an unconditional jump to LABEL as the next sequential instruction.  */
164
165void
166emit_jump (rtx label)
167{
168  do_pending_stack_adjust ();
169  emit_jump_insn (gen_jump (label));
170  emit_barrier ();
171}
172
173/* Emit code to jump to the address
174   specified by the pointer expression EXP.  */
175
176void
177expand_computed_goto (tree exp)
178{
179  rtx x = expand_normal (exp);
180
181  x = convert_memory_address (Pmode, x);
182
183  do_pending_stack_adjust ();
184  emit_indirect_jump (x);
185}
186
187/* Handle goto statements and the labels that they can go to.  */
188
189/* Specify the location in the RTL code of a label LABEL,
190   which is a LABEL_DECL tree node.
191
192   This is used for the kind of label that the user can jump to with a
193   goto statement, and for alternatives of a switch or case statement.
194   RTL labels generated for loops and conditionals don't go through here;
195   they are generated directly at the RTL level, by other functions below.
196
197   Note that this has nothing to do with defining label *names*.
198   Languages vary in how they do that and what that even means.  */
199
200void
201expand_label (tree label)
202{
203  rtx label_r = label_rtx (label);
204
205  do_pending_stack_adjust ();
206  emit_label (label_r);
207  if (DECL_NAME (label))
208    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
209
210  if (DECL_NONLOCAL (label))
211    {
212      expand_nl_goto_receiver ();
213      nonlocal_goto_handler_labels
214	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
215			     nonlocal_goto_handler_labels);
216    }
217
218  if (FORCED_LABEL (label))
219    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
220
221  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
222    maybe_set_first_label_num (label_r);
223}
224
225/* Generate RTL code for a `goto' statement with target label LABEL.
226   LABEL should be a LABEL_DECL tree node that was or will later be
227   defined with `expand_label'.  */
228
229void
230expand_goto (tree label)
231{
232#ifdef ENABLE_CHECKING
233  /* Check for a nonlocal goto to a containing function.  Should have
234     gotten translated to __builtin_nonlocal_goto.  */
235  tree context = decl_function_context (label);
236  gcc_assert (!context || context == current_function_decl);
237#endif
238
239  emit_jump (label_rtx (label));
240}
241
/* Count the occurrences of character C in the NUL-terminated
   string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
251
252/* Generate RTL for an asm statement (explicit assembler code).
253   STRING is a STRING_CST node containing the assembler code text,
254   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
255   insn is volatile; don't optimize it.  */
256
257static void
258expand_asm_loc (tree string, int vol, location_t locus)
259{
260  rtx body;
261
262  if (TREE_CODE (string) == ADDR_EXPR)
263    string = TREE_OPERAND (string, 0);
264
265  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
266				ggc_strdup (TREE_STRING_POINTER (string)),
267				locus);
268
269  MEM_VOLATILE_P (body) = vol;
270
271  emit_insn (body);
272}
273
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning (0, "output constraint %qc for operand %d "
		 "is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = XALLOCAVEC (char, c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string, classifying each letter.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned "
	       "%<+%> or %<=%>");
	return false;

      case '%':
	/* `%' commutes this operand with the following one, so it
	   cannot appear on the very last operand.  */
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
	/* Constraints that accept only memory operands.  */
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	/* Modifier characters and constant-valued constraints permit
	   neither a register nor memory.  */
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	/* The most general constraints: any register or any memory.  */
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	/* Machine-specific constraint letter: consult the backend.  */
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
419
/* Similar to parse_output_constraint, but for input constraints.
   INPUT_NUM is the zero-based index of this input; NINPUTS, NOUTPUTS
   and NINOUT count the asm's inputs, outputs, and in-out operands.
   CONSTRAINTS is the array of all constraint strings (outputs first),
   used to resolve matching ("0".."9") constraints.  On return
   *ALLOWS_MEM and *ALLOWS_REG say whether the constraint permits a
   memory or a register operand.  Returns TRUE on success, FALSE
   after reporting an error.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	/* Output-only characters; tolerated only if we switched to a
	   matched output constraint (constraint != orig_constraint)
	   below, where they legitimately lead the string.  */
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains %qc", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	/* `%' commutes this operand with the next, so it cannot
	   appear on the last input.  */
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case TARGET_MEM_CONSTRAINT:  case 'o':
	/* Constraints that accept only memory operands.  */
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	/* Modifier characters and constant-valued constraints permit
	   neither a register nor memory.  */
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	/* The most general constraints: any register or any memory.  */
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation %qc in constraint", constraint[j]);
	    return false;
	  }
	/* Machine-specific constraint letter: consult the backend.  */
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}
555
556/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
557   can be an asm-declared register.  Called via walk_tree.  */
558
559static tree
560decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
561			      void *data)
562{
563  tree decl = *declp;
564  const HARD_REG_SET *const regs = (const HARD_REG_SET *) data;
565
566  if (TREE_CODE (decl) == VAR_DECL)
567    {
568      if (DECL_HARD_REGISTER (decl)
569	  && REG_P (DECL_RTL (decl))
570	  && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
571	{
572	  rtx reg = DECL_RTL (decl);
573
574	  if (overlaps_hard_reg_set_p (*regs, GET_MODE (reg), REGNO (reg)))
575	    return decl;
576	}
577      walk_subtrees = 0;
578    }
579  else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
580    walk_subtrees = 0;
581  return NULL_TREE;
582}
583
584/* If there is an overlap between *REGS and DECL, return the first overlap
585   found.  */
586tree
587tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
588{
589  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
590}
591
592/* Check for overlap between registers marked in CLOBBERED_REGS and
593   anything inappropriate in T.  Emit error and return the register
594   variable definition for error, NULL_TREE for ok.  */
595
596static bool
597tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
598{
599  /* Conflicts between asm-declared register variables and the clobber
600     list are not allowed.  */
601  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
602
603  if (overlap)
604    {
605      error ("asm-specifier for variable %qE conflicts with asm clobber list",
606	     DECL_NAME (overlap));
607
608      /* Reset registerness to stop multiple errors emitted for a single
609	 variable.  */
610      DECL_REGISTER (overlap) = 0;
611      return true;
612    }
613
614  return false;
615}
616
617/* Generate RTL for an asm statement with arguments.
618   STRING is the instruction template.
619   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
620   Each output or input has an expression in the TREE_VALUE and
621   a tree list in TREE_PURPOSE which in turn contains a constraint
622   name in TREE_VALUE (or NULL_TREE) and a constraint string
623   in TREE_PURPOSE.
624   CLOBBERS is a list of STRING_CST nodes each naming a hard register
625   that is clobbered by this insn.
626
627   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
628   Some elements of OUTPUTS may be replaced with trees representing temporary
629   values.  The caller should copy those temporary values to the originally
630   specified lvalues.
631
632   VOL nonzero means the insn is volatile; don't optimize it.  */
633
634static void
635expand_asm_operands (tree string, tree outputs, tree inputs,
636		     tree clobbers, tree labels, int vol, location_t locus)
637{
638  rtvec argvec, constraintvec, labelvec;
639  rtx body;
640  int ninputs = list_length (inputs);
641  int noutputs = list_length (outputs);
642  int nlabels = list_length (labels);
643  int ninout;
644  int nclobbers;
645  HARD_REG_SET clobbered_regs;
646  int clobber_conflict_found = 0;
647  tree tail;
648  tree t;
649  int i;
650  /* Vector of RTX's of evaluated output operands.  */
651  rtx *output_rtx = XALLOCAVEC (rtx, noutputs);
652  int *inout_opnum = XALLOCAVEC (int, noutputs);
653  rtx *real_output_rtx = XALLOCAVEC (rtx, noutputs);
654  enum machine_mode *inout_mode = XALLOCAVEC (enum machine_mode, noutputs);
655  const char **constraints = XALLOCAVEC (const char *, noutputs + ninputs);
656  int old_generating_concat_p = generating_concat_p;
657
658  /* An ASM with no outputs needs to be treated as volatile, for now.  */
659  if (noutputs == 0)
660    vol = 1;
661
662  if (! check_operand_nalternatives (outputs, inputs))
663    return;
664
665  string = resolve_asm_operand_names (string, outputs, inputs, labels);
666
667  /* Collect constraints.  */
668  i = 0;
669  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
670    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
671  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
672    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
673
674  /* Sometimes we wish to automatically clobber registers across an asm.
675     Case in point is when the i386 backend moved from cc0 to a hard reg --
676     maintaining source-level compatibility means automatically clobbering
677     the flags register.  */
678  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
679
680  /* Count the number of meaningful clobbered registers, ignoring what
681     we would ignore later.  */
682  nclobbers = 0;
683  CLEAR_HARD_REG_SET (clobbered_regs);
684  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
685    {
686      const char *regname;
687
688      if (TREE_VALUE (tail) == error_mark_node)
689	return;
690      regname = TREE_STRING_POINTER (TREE_VALUE (tail));
691
692      i = decode_reg_name (regname);
693      if (i >= 0 || i == -4)
694	++nclobbers;
695      else if (i == -2)
696	error ("unknown register name %qs in %<asm%>", regname);
697
698      /* Mark clobbered registers.  */
699      if (i >= 0)
700        {
701	  /* Clobbering the PIC register is an error.  */
702	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
703	    {
704	      error ("PIC register %qs clobbered in %<asm%>", regname);
705	      return;
706	    }
707
708	  SET_HARD_REG_BIT (clobbered_regs, i);
709	}
710    }
711
712  /* First pass over inputs and outputs checks validity and sets
713     mark_addressable if needed.  */
714
715  ninout = 0;
716  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
717    {
718      tree val = TREE_VALUE (tail);
719      tree type = TREE_TYPE (val);
720      const char *constraint;
721      bool is_inout;
722      bool allows_reg;
723      bool allows_mem;
724
725      /* If there's an erroneous arg, emit no insn.  */
726      if (type == error_mark_node)
727	return;
728
729      /* Try to parse the output constraint.  If that fails, there's
730	 no point in going further.  */
731      constraint = constraints[i];
732      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
733				    &allows_mem, &allows_reg, &is_inout))
734	return;
735
736      if (! allows_reg
737	  && (allows_mem
738	      || is_inout
739	      || (DECL_P (val)
740		  && REG_P (DECL_RTL (val))
741		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
742	mark_addressable (val);
743
744      if (is_inout)
745	ninout++;
746    }
747
748  ninputs += ninout;
749  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
750    {
751      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
752      return;
753    }
754
755  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
756    {
757      bool allows_reg, allows_mem;
758      const char *constraint;
759
760      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
761	 would get VOIDmode and that could cause a crash in reload.  */
762      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
763	return;
764
765      constraint = constraints[i + noutputs];
766      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
767				    constraints, &allows_mem, &allows_reg))
768	return;
769
770      if (! allows_reg && allows_mem)
771	mark_addressable (TREE_VALUE (tail));
772    }
773
774  /* Second pass evaluates arguments.  */
775
776  /* Make sure stack is consistent for asm goto.  */
777  if (nlabels > 0)
778    do_pending_stack_adjust ();
779
780  ninout = 0;
781  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
782    {
783      tree val = TREE_VALUE (tail);
784      tree type = TREE_TYPE (val);
785      bool is_inout;
786      bool allows_reg;
787      bool allows_mem;
788      rtx op;
789      bool ok;
790
791      ok = parse_output_constraint (&constraints[i], i, ninputs,
792				    noutputs, &allows_mem, &allows_reg,
793				    &is_inout);
794      gcc_assert (ok);
795
796      /* If an output operand is not a decl or indirect ref and our constraint
797	 allows a register, make a temporary to act as an intermediate.
798	 Make the asm insn write into that, then our caller will copy it to
799	 the real output operand.  Likewise for promoted variables.  */
800
801      generating_concat_p = 0;
802
803      real_output_rtx[i] = NULL_RTX;
804      if ((TREE_CODE (val) == INDIRECT_REF
805	   && allows_mem)
806	  || (DECL_P (val)
807	      && (allows_mem || REG_P (DECL_RTL (val)))
808	      && ! (REG_P (DECL_RTL (val))
809		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
810	  || ! allows_reg
811	  || is_inout)
812	{
813	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
814	  if (MEM_P (op))
815	    op = validize_mem (op);
816
817	  if (! allows_reg && !MEM_P (op))
818	    error ("output number %d not directly addressable", i);
819	  if ((! allows_mem && MEM_P (op))
820	      || GET_CODE (op) == CONCAT)
821	    {
822	      real_output_rtx[i] = op;
823	      op = gen_reg_rtx (GET_MODE (op));
824	      if (is_inout)
825		emit_move_insn (op, real_output_rtx[i]);
826	    }
827	}
828      else
829	{
830	  op = assign_temp (type, 0, 0, 1);
831	  op = validize_mem (op);
832	  if (!MEM_P (op) && TREE_CODE (TREE_VALUE (tail)) == SSA_NAME)
833	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (TREE_VALUE (tail)), op);
834	  TREE_VALUE (tail) = make_tree (type, op);
835	}
836      output_rtx[i] = op;
837
838      generating_concat_p = old_generating_concat_p;
839
840      if (is_inout)
841	{
842	  inout_mode[ninout] = TYPE_MODE (type);
843	  inout_opnum[ninout++] = i;
844	}
845
846      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
847	clobber_conflict_found = 1;
848    }
849
850  /* Make vectors for the expression-rtx, constraint strings,
851     and named operands.  */
852
853  argvec = rtvec_alloc (ninputs);
854  constraintvec = rtvec_alloc (ninputs);
855  labelvec = rtvec_alloc (nlabels);
856
857  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
858				: GET_MODE (output_rtx[0])),
859			       ggc_strdup (TREE_STRING_POINTER (string)),
860			       empty_string, 0, argvec, constraintvec,
861			       labelvec, locus);
862
863  MEM_VOLATILE_P (body) = vol;
864
865  /* Eval the inputs and put them into ARGVEC.
866     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
867
868  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
869    {
870      bool allows_reg, allows_mem;
871      const char *constraint;
872      tree val, type;
873      rtx op;
874      bool ok;
875
876      constraint = constraints[i + noutputs];
877      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
878				   constraints, &allows_mem, &allows_reg);
879      gcc_assert (ok);
880
881      generating_concat_p = 0;
882
883      val = TREE_VALUE (tail);
884      type = TREE_TYPE (val);
885      /* EXPAND_INITIALIZER will not generate code for valid initializer
886	 constants, but will still generate code for other types of operand.
887	 This is the behavior we want for constant constraints.  */
888      op = expand_expr (val, NULL_RTX, VOIDmode,
889			allows_reg ? EXPAND_NORMAL
890			: allows_mem ? EXPAND_MEMORY
891			: EXPAND_INITIALIZER);
892
893      /* Never pass a CONCAT to an ASM.  */
894      if (GET_CODE (op) == CONCAT)
895	op = force_reg (GET_MODE (op), op);
896      else if (MEM_P (op))
897	op = validize_mem (op);
898
899      if (asm_operand_ok (op, constraint, NULL) <= 0)
900	{
901	  if (allows_reg && TYPE_MODE (type) != BLKmode)
902	    op = force_reg (TYPE_MODE (type), op);
903	  else if (!allows_mem)
904	    warning (0, "asm operand %d probably doesn%'t match constraints",
905		     i + noutputs);
906	  else if (MEM_P (op))
907	    {
908	      /* We won't recognize either volatile memory or memory
909		 with a queued address as available a memory_operand
910		 at this point.  Ignore it: clearly this *is* a memory.  */
911	    }
912	  else
913	    {
914	      warning (0, "use of memory input without lvalue in "
915		       "asm operand %d is deprecated", i + noutputs);
916
917	      if (CONSTANT_P (op))
918		{
919		  rtx mem = force_const_mem (TYPE_MODE (type), op);
920		  if (mem)
921		    op = validize_mem (mem);
922		  else
923		    op = force_reg (TYPE_MODE (type), op);
924		}
925	      if (REG_P (op)
926		  || GET_CODE (op) == SUBREG
927		  || GET_CODE (op) == CONCAT)
928		{
929		  tree qual_type = build_qualified_type (type,
930							 (TYPE_QUALS (type)
931							  | TYPE_QUAL_CONST));
932		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
933		  memloc = validize_mem (memloc);
934		  emit_move_insn (memloc, op);
935		  op = memloc;
936		}
937	    }
938	}
939
940      generating_concat_p = old_generating_concat_p;
941      ASM_OPERANDS_INPUT (body, i) = op;
942
943      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
944	= gen_rtx_ASM_INPUT (TYPE_MODE (type),
945			     ggc_strdup (constraints[i + noutputs]));
946
947      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
948	clobber_conflict_found = 1;
949    }
950
951  /* Protect all the operands from the queue now that they have all been
952     evaluated.  */
953
954  generating_concat_p = 0;
955
956  /* For in-out operands, copy output rtx to input rtx.  */
957  for (i = 0; i < ninout; i++)
958    {
959      int j = inout_opnum[i];
960      char buffer[16];
961
962      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
963	= output_rtx[j];
964
965      sprintf (buffer, "%d", j);
966      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
967	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
968    }
969
970  /* Copy labels to the vector.  */
971  for (i = 0, tail = labels; i < nlabels; ++i, tail = TREE_CHAIN (tail))
972    ASM_OPERANDS_LABEL (body, i)
973      = gen_rtx_LABEL_REF (Pmode, label_rtx (TREE_VALUE (tail)));
974
975  generating_concat_p = old_generating_concat_p;
976
977  /* Now, for each output, construct an rtx
978     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
979			       ARGVEC CONSTRAINTS OPNAMES))
980     If there is more than one, put them inside a PARALLEL.  */
981
982  if (nlabels > 0 && nclobbers == 0)
983    {
984      gcc_assert (noutputs == 0);
985      emit_jump_insn (body);
986    }
987  else if (noutputs == 0 && nclobbers == 0)
988    {
989      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
990      emit_insn (body);
991    }
992  else if (noutputs == 1 && nclobbers == 0)
993    {
994      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
995      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
996    }
997  else
998    {
999      rtx obody = body;
1000      int num = noutputs;
1001
1002      if (num == 0)
1003	num = 1;
1004
1005      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1006
1007      /* For each output operand, store a SET.  */
1008      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1009	{
1010	  XVECEXP (body, 0, i)
1011	    = gen_rtx_SET (VOIDmode,
1012			   output_rtx[i],
1013			   gen_rtx_ASM_OPERANDS
1014			   (GET_MODE (output_rtx[i]),
1015			    ggc_strdup (TREE_STRING_POINTER (string)),
1016			    ggc_strdup (constraints[i]),
1017			    i, argvec, constraintvec, labelvec, locus));
1018
1019	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1020	}
1021
1022      /* If there are no outputs (but there are some clobbers)
1023	 store the bare ASM_OPERANDS into the PARALLEL.  */
1024
1025      if (i == 0)
1026	XVECEXP (body, 0, i++) = obody;
1027
1028      /* Store (clobber REG) for each clobbered register specified.  */
1029
1030      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1031	{
1032	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1033	  int j = decode_reg_name (regname);
1034	  rtx clobbered_reg;
1035
1036	  if (j < 0)
1037	    {
1038	      if (j == -3)	/* `cc', which is not a register */
1039		continue;
1040
1041	      if (j == -4)	/* `memory', don't cache memory across asm */
1042		{
1043		  XVECEXP (body, 0, i++)
1044		    = gen_rtx_CLOBBER (VOIDmode,
1045				       gen_rtx_MEM
1046				       (BLKmode,
1047					gen_rtx_SCRATCH (VOIDmode)));
1048		  continue;
1049		}
1050
1051	      /* Ignore unknown register, error already signaled.  */
1052	      continue;
1053	    }
1054
1055	  /* Use QImode since that's guaranteed to clobber just one reg.  */
1056	  clobbered_reg = gen_rtx_REG (QImode, j);
1057
1058	  /* Do sanity check for overlap between clobbers and respectively
1059	     input and outputs that hasn't been handled.  Such overlap
1060	     should have been detected and reported above.  */
1061	  if (!clobber_conflict_found)
1062	    {
1063	      int opno;
1064
1065	      /* We test the old body (obody) contents to avoid tripping
1066		 over the under-construction body.  */
1067	      for (opno = 0; opno < noutputs; opno++)
1068		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1069		  internal_error ("asm clobber conflict with output operand");
1070
1071	      for (opno = 0; opno < ninputs - ninout; opno++)
1072		if (reg_overlap_mentioned_p (clobbered_reg,
1073					     ASM_OPERANDS_INPUT (obody, opno)))
1074		  internal_error ("asm clobber conflict with input operand");
1075	    }
1076
1077	  XVECEXP (body, 0, i++)
1078	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1079	}
1080
1081      if (nlabels > 0)
1082	emit_jump_insn (body);
1083      else
1084	emit_insn (body);
1085    }
1086
1087  /* For any outputs that needed reloading into registers, spill them
1088     back to where they belong.  */
1089  for (i = 0; i < noutputs; ++i)
1090    if (real_output_rtx[i])
1091      emit_move_insn (real_output_rtx[i], output_rtx[i]);
1092
1093  crtl->has_asm_statement = 1;
1094  free_temp_slots ();
1095}
1096
1097void
1098expand_asm_stmt (gimple stmt)
1099{
1100  int noutputs;
1101  tree outputs, tail, t;
1102  tree *o;
1103  size_t i, n;
1104  const char *s;
1105  tree str, out, in, cl, labels;
1106  location_t locus = gimple_location (stmt);
1107
1108  /* Meh... convert the gimple asm operands into real tree lists.
1109     Eventually we should make all routines work on the vectors instead
1110     of relying on TREE_CHAIN.  */
1111  out = NULL_TREE;
1112  n = gimple_asm_noutputs (stmt);
1113  if (n > 0)
1114    {
1115      t = out = gimple_asm_output_op (stmt, 0);
1116      for (i = 1; i < n; i++)
1117	t = TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
1118    }
1119
1120  in = NULL_TREE;
1121  n = gimple_asm_ninputs (stmt);
1122  if (n > 0)
1123    {
1124      t = in = gimple_asm_input_op (stmt, 0);
1125      for (i = 1; i < n; i++)
1126	t = TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
1127    }
1128
1129  cl = NULL_TREE;
1130  n = gimple_asm_nclobbers (stmt);
1131  if (n > 0)
1132    {
1133      t = cl = gimple_asm_clobber_op (stmt, 0);
1134      for (i = 1; i < n; i++)
1135	t = TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
1136    }
1137
1138  labels = NULL_TREE;
1139  n = gimple_asm_nlabels (stmt);
1140  if (n > 0)
1141    {
1142      t = labels = gimple_asm_label_op (stmt, 0);
1143      for (i = 1; i < n; i++)
1144	t = TREE_CHAIN (t) = gimple_asm_label_op (stmt, i);
1145    }
1146
1147  s = gimple_asm_string (stmt);
1148  str = build_string (strlen (s), s);
1149
1150  if (gimple_asm_input_p (stmt))
1151    {
1152      expand_asm_loc (str, gimple_asm_volatile_p (stmt), locus);
1153      return;
1154    }
1155
1156  outputs = out;
1157  noutputs = gimple_asm_noutputs (stmt);
1158  /* o[I] is the place that output number I should be written.  */
1159  o = (tree *) alloca (noutputs * sizeof (tree));
1160
1161  /* Record the contents of OUTPUTS before it is modified.  */
1162  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1163    o[i] = TREE_VALUE (tail);
1164
1165  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1166     OUTPUTS some trees for where the values were actually stored.  */
1167  expand_asm_operands (str, outputs, in, cl, labels,
1168		       gimple_asm_volatile_p (stmt), locus);
1169
1170  /* Copy all the intermediate outputs into the specified outputs.  */
1171  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1172    {
1173      if (o[i] != TREE_VALUE (tail))
1174	{
1175	  expand_assignment (o[i], TREE_VALUE (tail), false);
1176	  free_temp_slots ();
1177
1178	  /* Restore the original value so that it's correct the next
1179	     time we expand this function.  */
1180	  TREE_VALUE (tail) = o[i];
1181	}
1182    }
1183}
1184
1185/* A subroutine of expand_asm_operands.  Check that all operands have
1186   the same number of alternatives.  Return true if so.  */
1187
1188static bool
1189check_operand_nalternatives (tree outputs, tree inputs)
1190{
1191  if (outputs || inputs)
1192    {
1193      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1194      int nalternatives
1195	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1196      tree next = inputs;
1197
1198      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1199	{
1200	  error ("too many alternatives in %<asm%>");
1201	  return false;
1202	}
1203
1204      tmp = outputs;
1205      while (tmp)
1206	{
1207	  const char *constraint
1208	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1209
1210	  if (n_occurrences (',', constraint) != nalternatives)
1211	    {
1212	      error ("operand constraints for %<asm%> differ "
1213		     "in number of alternatives");
1214	      return false;
1215	    }
1216
1217	  if (TREE_CHAIN (tmp))
1218	    tmp = TREE_CHAIN (tmp);
1219	  else
1220	    tmp = next, next = 0;
1221	}
1222    }
1223
1224  return true;
1225}
1226
1227/* A subroutine of expand_asm_operands.  Check that all operand names
1228   are unique.  Return true if so.  We rely on the fact that these names
1229   are identifiers, and so have been canonicalized by get_identifier,
1230   so all we need are pointer comparisons.  */
1231
1232static bool
1233check_unique_operand_names (tree outputs, tree inputs, tree labels)
1234{
1235  tree i, j;
1236
1237  for (i = outputs; i ; i = TREE_CHAIN (i))
1238    {
1239      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1240      if (! i_name)
1241	continue;
1242
1243      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1244	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1245	  goto failure;
1246    }
1247
1248  for (i = inputs; i ; i = TREE_CHAIN (i))
1249    {
1250      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1251      if (! i_name)
1252	continue;
1253
1254      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1255	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1256	  goto failure;
1257      for (j = outputs; j ; j = TREE_CHAIN (j))
1258	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1259	  goto failure;
1260    }
1261
1262  for (i = labels; i ; i = TREE_CHAIN (i))
1263    {
1264      tree i_name = TREE_PURPOSE (i);
1265      if (! i_name)
1266	continue;
1267
1268      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1269	if (simple_cst_equal (i_name, TREE_PURPOSE (j)))
1270	  goto failure;
1271      for (j = inputs; j ; j = TREE_CHAIN (j))
1272	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1273	  goto failure;
1274    }
1275
1276  return true;
1277
1278 failure:
1279  error ("duplicate asm operand name %qs",
1280	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1281  return false;
1282}
1283
1284/* A subroutine of expand_asm_operands.  Resolve the names of the operands
1285   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1286   STRING and in the constraints to those numbers.  */
1287
1288tree
1289resolve_asm_operand_names (tree string, tree outputs, tree inputs, tree labels)
1290{
1291  char *buffer;
1292  char *p;
1293  const char *c;
1294  tree t;
1295
1296  check_unique_operand_names (outputs, inputs, labels);
1297
1298  /* Substitute [<name>] in input constraint strings.  There should be no
1299     named operands in output constraints.  */
1300  for (t = inputs; t ; t = TREE_CHAIN (t))
1301    {
1302      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1303      if (strchr (c, '[') != NULL)
1304	{
1305	  p = buffer = xstrdup (c);
1306	  while ((p = strchr (p, '[')) != NULL)
1307	    p = resolve_operand_name_1 (p, outputs, inputs, NULL);
1308	  TREE_VALUE (TREE_PURPOSE (t))
1309	    = build_string (strlen (buffer), buffer);
1310	  free (buffer);
1311	}
1312    }
1313
1314  /* Now check for any needed substitutions in the template.  */
1315  c = TREE_STRING_POINTER (string);
1316  while ((c = strchr (c, '%')) != NULL)
1317    {
1318      if (c[1] == '[')
1319	break;
1320      else if (ISALPHA (c[1]) && c[2] == '[')
1321	break;
1322      else
1323	{
1324	  c += 1;
1325	  continue;
1326	}
1327    }
1328
1329  if (c)
1330    {
1331      /* OK, we need to make a copy so we can perform the substitutions.
1332	 Assume that we will not need extra space--we get to remove '['
1333	 and ']', which means we cannot have a problem until we have more
1334	 than 999 operands.  */
1335      buffer = xstrdup (TREE_STRING_POINTER (string));
1336      p = buffer + (c - TREE_STRING_POINTER (string));
1337
1338      while ((p = strchr (p, '%')) != NULL)
1339	{
1340	  if (p[1] == '[')
1341	    p += 1;
1342	  else if (ISALPHA (p[1]) && p[2] == '[')
1343	    p += 2;
1344	  else
1345	    {
1346	      p += 1;
1347	      continue;
1348	    }
1349
1350	  p = resolve_operand_name_1 (p, outputs, inputs, labels);
1351	}
1352
1353      string = build_string (strlen (buffer), buffer);
1354      free (buffer);
1355    }
1356
1357  return string;
1358}
1359
1360/* A subroutine of resolve_operand_names.  P points to the '[' for a
1361   potential named operand of the form [<name>].  In place, replace
1362   the name and brackets with a number.  Return a pointer to the
1363   balance of the string after substitution.  */
1364
1365static char *
1366resolve_operand_name_1 (char *p, tree outputs, tree inputs, tree labels)
1367{
1368  char *q;
1369  int op;
1370  tree t;
1371
1372  /* Collect the operand name.  */
1373  q = strchr (++p, ']');
1374  if (!q)
1375    {
1376      error ("missing close brace for named operand");
1377      return strchr (p, '\0');
1378    }
1379  *q = '\0';
1380
1381  /* Resolve the name to a number.  */
1382  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
1383    {
1384      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1385      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1386	goto found;
1387    }
1388  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
1389    {
1390      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
1391      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1392	goto found;
1393    }
1394  for (t = labels; t ; t = TREE_CHAIN (t), op++)
1395    {
1396      tree name = TREE_PURPOSE (t);
1397      if (name && strcmp (TREE_STRING_POINTER (name), p) == 0)
1398	goto found;
1399    }
1400
1401  error ("undefined named operand %qs", identifier_to_locale (p));
1402  op = 0;
1403
1404 found:
1405  /* Replace the name with the number.  Unfortunately, not all libraries
1406     get the return value of sprintf correct, so search for the end of the
1407     generated string by hand.  */
1408  sprintf (--p, "%d", op);
1409  p = strchr (p, '\0');
1410
1411  /* Verify the no extra buffer space assumption.  */
1412  gcc_assert (p <= q);
1413
1414  /* Shift the rest of the buffer down to fill the gap.  */
1415  memmove (p, q + 1, strlen (q + 1) + 1);
1416
1417  return p;
1418}
1419
1420/* Generate RTL to evaluate the expression EXP.  */
1421
1422void
1423expand_expr_stmt (tree exp)
1424{
1425  rtx value;
1426  tree type;
1427
1428  value = expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
1429  type = TREE_TYPE (exp);
1430
1431  /* If all we do is reference a volatile value in memory,
1432     copy it to a register to be sure it is actually touched.  */
1433  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1434    {
1435      if (TYPE_MODE (type) == VOIDmode)
1436	;
1437      else if (TYPE_MODE (type) != BLKmode)
1438	value = copy_to_reg (value);
1439      else
1440	{
1441	  rtx lab = gen_label_rtx ();
1442
1443	  /* Compare the value with itself to reference it.  */
1444	  emit_cmp_and_jump_insns (value, value, EQ,
1445				   expand_normal (TYPE_SIZE (type)),
1446				   BLKmode, 0, lab);
1447	  emit_label (lab);
1448	}
1449    }
1450
1451  /* Free any temporaries used to evaluate this expression.  */
1452  free_temp_slots ();
1453}
1454
1455/* Warn if EXP contains any computations whose results are not used.
1456   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
1457   (potential) location of the expression.  */
1458
int
warn_if_unused_value (const_tree exp, location_t locus)
{
 restart:
  /* An expression already used, or explicitly marked as not worth a
     warning, never warns.  */
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Prefer the expression's own location over the caller-supplied
     LOCUS when it has one.  */
  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    /* These are evaluated for their side effects, so an unused value
       is not suspicious.  */
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
    case VA_ARG_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      /* Look through wrappers that do not affect the value.  */
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
	 case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_OPERAND_LENGTH (exp) == 0)
	return 0;

    warn:
      warning_at (locus, OPT_Wunused_value, "value computed is not used");
      return 1;
    }
}
1550
1551
1552/* Generate RTL to return from the current function, with no value.
1553   (That is, we do not do anything about returning any value.)  */
1554
void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  /* Emit the common (valueless) return sequence: flush pending stack
     adjustments and jump to the function's return label.  */
  expand_null_return_1 ();
}
1565
1566/* Generate RTL to return directly from the current function.
1567   (That is, we bypass any return value.)  */
1568
1569void
1570expand_naked_return (void)
1571{
1572  rtx end_label;
1573
1574  clear_pending_stack_adjust ();
1575  do_pending_stack_adjust ();
1576
1577  end_label = naked_return_label;
1578  if (end_label == 0)
1579    end_label = naked_return_label = gen_label_rtx ();
1580
1581  emit_jump (end_label);
1582}
1583
1584/* Generate RTL to return from the current function, with value VAL.  */
1585
1586static void
1587expand_value_return (rtx val)
1588{
1589  /* Copy the value to the return location unless it's already there.  */
1590
1591  tree decl = DECL_RESULT (current_function_decl);
1592  rtx return_reg = DECL_RTL (decl);
1593  if (return_reg != val)
1594    {
1595      tree funtype = TREE_TYPE (current_function_decl);
1596      tree type = TREE_TYPE (decl);
1597      int unsignedp = TYPE_UNSIGNED (type);
1598      enum machine_mode old_mode = DECL_MODE (decl);
1599      enum machine_mode mode = promote_function_mode (type, old_mode,
1600						      &unsignedp, funtype, 1);
1601
1602      if (mode != old_mode)
1603	val = convert_modes (mode, old_mode, val, unsignedp);
1604
1605      if (GET_CODE (return_reg) == PARALLEL)
1606	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1607      else
1608	emit_move_insn (return_reg, val);
1609    }
1610
1611  expand_null_return_1 ();
1612}
1613
1614/* Output a return with no value.  */
1615
static void
expand_null_return_1 (void)
{
  /* Flush any postponed stack adjustments before leaving the body.  */
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  /* All return paths funnel through the single return_label.  */
  emit_jump (return_label);
}
1623
1624/* Generate RTL to evaluate the expression RETVAL and return it
1625   from the current function.  */
1626
void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      /* Evaluate RETVAL for its side effects only.  */
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    /* For "return <result> = <expr>", expand just the right-hand
       side below.  */
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = XALLOCAVEC (rtx, n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_normal (retval_rhs);
      enum machine_mode tmpmode, result_reg_mode;

      /* An empty aggregate carries no bits to return.  */
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      /* Assemble the word-sized pieces in a register wide enough to
	 hold them all; operand_subword below requires at least a
	 word-sized mode.  */
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
         reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
1801
1802/* Emit code to restore vital registers at the beginning of a nonlocal goto
1803   handler.  */
static void
expand_nl_goto_receiver (void)
{
  rtx chain;

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Only adjust the frame pointer by hand when the target does not
     provide a nonlocal_goto pattern of its own.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments are to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

  /* Give the target a chance to emit extra receiver setup code.  */
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
1873
1874/* Generate RTL for the automatic variable declaration DECL.
1875   (Other kinds of declarations are simply ignored if seen here.)  */
1876
void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    /* Use a placeholder MEM so later references don't crash.  */
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    {
      /* Variable with incomplete type.  */
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (use_register_for_decl (decl))
    {
      /* Automatic variable that can go in a register.  */
      enum machine_mode reg_mode = promote_decl_mode (decl, NULL);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	  mark_user_reg (DECL_RTL (decl));

      /* Record pointer-ness and pointee alignment for alias analysis.  */
      if (POINTER_TYPE_P (type))
	mark_reg_pointer (DECL_RTL (decl),
			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
    }

  else
    {
      /* Automatic variable that must live in memory.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* Variable-sized decls are dealt with in the gimplifier.  */
      gcc_assert (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST);

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  gcc_assert (MEM_P (DECL_RTL (decl)));
	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      /* Point the previously-issued address register at the stack
	 slot just assigned.  */
      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
}
1978
1979/* Emit code to save the current value of stack.  */
1980rtx
1981expand_stack_save (void)
1982{
1983  rtx ret = NULL_RTX;
1984
1985  do_pending_stack_adjust ();
1986  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
1987  return ret;
1988}
1989
1990/* Emit code to restore the current value of stack.  */
1991void
1992expand_stack_restore (tree var)
1993{
1994  rtx sa = expand_normal (var);
1995
1996  sa = convert_memory_address (Pmode, sa);
1997  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
1998}
1999
/* Do the insertion of a case label into case_list.  The labels are
   fed to us in descending order from the sorted vector of case labels used
   in the tree part of the middle end.  So the list we construct is
   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
   are converted to case's index type TYPE.

   HEAD is the current list head (may be NULL).  LABEL is the target of
   this case.  Nodes are allocated from CASE_NODE_POOL.  Returns the new
   list head, which is HEAD itself when the case is unreachable and was
   dropped.  */

static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
               tree label, alloc_pool case_node_pool)
{
  tree min_value, max_value;
  struct case_node *r;

  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (high, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;

      /* If the lower bound is less than the index type's minimum
	 value, truncate the range bounds.  */
      if (TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	low = min_value;
      /* Clamp before converting: fold_convert to TYPE would wrap an
	 out-of-range constant.  */
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
	 value, truncate the range bounds.  */
      if (TREE_CODE (max_value) == INTEGER_CST
	  && tree_int_cst_compare (high, max_value) > 0)
	high = max_value;
      high = fold_convert (type, high);
    }


  /* Add this label to the chain.  Make sure to drop overflow flags.
     Rebuilding the constants via build_int_cst_wide discards any
     TREE_OVERFLOW marks the bounds may carry.  */
  r = (struct case_node *) pool_alloc (case_node_pool);
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
			       TREE_INT_CST_HIGH (low));
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
				TREE_INT_CST_HIGH (high));
  r->code_label = label;
  r->parent = r->left = NULL;
  /* Prepending to HEAD turns the descending input order into an
     ascending list.  */
  r->right = head;
  return r;
}
2070
2071/* Maximum number of case bit tests.  */
2072#define MAX_CASE_BIT_TESTS  3
2073
2074/* By default, enable case bit tests on targets with ashlsi3.  */
2075#ifndef CASE_USE_BIT_TESTS
2076#define CASE_USE_BIT_TESTS  (optab_handler (ashl_optab, word_mode)->insn_code \
2077			     != CODE_FOR_nothing)
2078#endif
2079
2080
2081/* A case_bit_test represents a set of case nodes that may be
2082   selected from using a bit-wise comparison.  HI and LO hold
2083   the integer to be tested against, LABEL contains the label
2084   to jump to upon success and BITS counts the number of case
2085   nodes handled by this test, typically the number of bits
2086   set in HI:LO.  */
2087
struct case_bit_test
{
  HOST_WIDE_INT hi;	/* High word of the constant bit mask.  */
  HOST_WIDE_INT lo;	/* Low word of the constant bit mask.  */
  rtx label;		/* Label to jump to when the test succeeds.  */
  int bits;		/* Number of case nodes folded into this test.  */
};
2095
2096/* Determine whether "1 << x" is relatively cheap in word_mode.  */
2097
2098static
2099bool lshift_cheap_p (void)
2100{
2101  static bool init = false;
2102  static bool cheap = true;
2103
2104  if (!init)
2105    {
2106      rtx reg = gen_rtx_REG (word_mode, 10000);
2107      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET,
2108      			   optimize_insn_for_speed_p ());
2109      cheap = cost < COSTS_N_INSNS (3);
2110      init = true;
2111    }
2112
2113  return cheap;
2114}
2115
2116/* Comparison function for qsort to order bit tests by decreasing
2117   number of case nodes, i.e. the node with the most cases gets
2118   tested first.  */
2119
2120static int
2121case_bit_test_cmp (const void *p1, const void *p2)
2122{
2123  const struct case_bit_test *const d1 = (const struct case_bit_test *) p1;
2124  const struct case_bit_test *const d2 = (const struct case_bit_test *) p2;
2125
2126  if (d2->bits != d1->bits)
2127    return d2->bits - d1->bits;
2128
2129  /* Stabilize the sort.  */
2130  return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2131}
2132
2133/*  Expand a switch statement by a short sequence of bit-wise
2134    comparisons.  "switch(x)" is effectively converted into
2135    "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2136    integer constants.
2137
2138    INDEX_EXPR is the value being switched on, which is of
2139    type INDEX_TYPE.  MINVAL is the lowest case value of in
2140    the case nodes, of INDEX_TYPE type, and RANGE is highest
2141    value minus MINVAL, also of type INDEX_TYPE.  NODES is
2142    the set of case nodes, and DEFAULT_LABEL is the label to
2143    branch to should none of the cases match.
2144
2145    There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2146    node targets.  */
2147
2148static void
2149emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2150		     tree range, case_node_ptr nodes, rtx default_label)
2151{
2152  struct case_bit_test test[MAX_CASE_BIT_TESTS];
2153  enum machine_mode mode;
2154  rtx expr, index, label;
2155  unsigned int i,j,lo,hi;
2156  struct case_node *n;
2157  unsigned int count;
2158
2159  count = 0;
2160  for (n = nodes; n; n = n->right)
2161    {
2162      label = label_rtx (n->code_label);
2163      for (i = 0; i < count; i++)
2164	if (label == test[i].label)
2165	  break;
2166
2167      if (i == count)
2168	{
2169	  gcc_assert (count < MAX_CASE_BIT_TESTS);
2170	  test[i].hi = 0;
2171	  test[i].lo = 0;
2172	  test[i].label = label;
2173	  test[i].bits = 1;
2174	  count++;
2175	}
2176      else
2177        test[i].bits++;
2178
2179      lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2180				      n->low, minval), 1);
2181      hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2182				      n->high, minval), 1);
2183      for (j = lo; j <= hi; j++)
2184        if (j >= HOST_BITS_PER_WIDE_INT)
2185	  test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2186	else
2187	  test[i].lo |= (HOST_WIDE_INT) 1 << j;
2188    }
2189
2190  qsort (test, count, sizeof(*test), case_bit_test_cmp);
2191
2192  index_expr = fold_build2 (MINUS_EXPR, index_type,
2193			    fold_convert (index_type, index_expr),
2194			    fold_convert (index_type, minval));
2195  index = expand_normal (index_expr);
2196  do_pending_stack_adjust ();
2197
2198  mode = TYPE_MODE (index_type);
2199  expr = expand_normal (range);
2200  if (default_label)
2201    emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2202			     default_label);
2203
2204  index = convert_to_mode (word_mode, index, 0);
2205  index = expand_binop (word_mode, ashl_optab, const1_rtx,
2206			index, NULL_RTX, 1, OPTAB_WIDEN);
2207
2208  for (i = 0; i < count; i++)
2209    {
2210      expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2211      expr = expand_binop (word_mode, and_optab, index, expr,
2212			   NULL_RTX, 1, OPTAB_WIDEN);
2213      emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2214			       word_mode, 1, test[i].label);
2215    }
2216
2217  if (default_label)
2218    emit_jump (default_label);
2219}
2220
2221#ifndef HAVE_casesi
2222#define HAVE_casesi 0
2223#endif
2224
2225#ifndef HAVE_tablejump
2226#define HAVE_tablejump 0
2227#endif
2228
/* Terminate a case (Pascal/Ada) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.

   STMT is the GIMPLE_SWITCH being expanded; its index expression and
   sorted label vector are read with gimple_switch_index and
   gimple_switch_label.  One of three dispatch strategies is emitted:
   a short sequence of bit tests, a binary decision tree, or a
   casesi/jump-table dispatch.  */

void
expand_case (gimple stmt)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;

  tree index_expr = gimple_switch_index (stmt);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl = NULL_TREE;

  alloc_pool case_node_pool = create_alloc_pool ("struct case_node pool",
                                                 sizeof (struct case_node),
                                                 100);

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;
      /* Index of the first non-default case label in STMT.  */
      int stopi = 0;

      /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
	 expressions being INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case, if ever taken, is the first element.  */
      elt = gimple_switch_label (stmt, 0);
      if (!CASE_LOW (elt) && !CASE_HIGH (elt))
	{
	  default_label_decl = CASE_LABEL (elt);
	  stopi = 1;
	}

      /* Walk the labels backwards so the ascending case_list comes
	 out right (add_case_node prepends).  */
      for (i = gimple_switch_num_labels (stmt) - 1; i >= stopi; --i)
	{
	  tree low, high;
	  elt = gimple_switch_label (stmt, i);

	  low = CASE_LOW (elt);
	  gcc_assert (low);
	  high = CASE_HIGH (elt);

	  /* Discard empty ranges.  */
	  if (high && tree_int_cst_lt (high, low))
	    continue;

	  case_list = add_case_node (case_list, index_type, low, high,
                                     CASE_LABEL (elt), case_node_pool);
	}


      before_case = start = get_last_insn ();
      if (default_label_decl)
	default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
	{
	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (tree_int_cst_lt (n->low, minval))
		minval = n->low;
	      if (tree_int_cst_lt (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* If we have not seen this label yet, then increase the
	     number of unique case node targets seen.  */
	  lab = label_rtx (n->code_label);
	  if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
	    {
	      bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
	      uniq++;
	    }
	}

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
	 destination, such as one with a default case only.  However,
	 it doesn't remove cases that are out of range for the switch
	 type, so we may still get a zero here.  */
      if (count == 0)
	{
	  if (default_label)
	    emit_jump (default_label);
          free_alloc_pool (case_node_pool);
	  return;
	}

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      if (CASE_USE_BIT_TESTS
	  && ! TREE_CONSTANT (index_expr)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0
	  && lshift_cheap_p ()
	  && ((uniq == 1 && count >= 3)
	      || (uniq == 2 && count >= 5)
	      || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = build_int_cst (index_type, 0);
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       case_list, default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < targetm.case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_insn_for_size_p () ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || !flag_jump_tables
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_normal (index_expr);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  if (MEM_P (index))
	    index = copy_to_reg (index);

	  /* We generate a binary decision tree to select the
	     appropriate target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */

	  use_cost_table = estimate_case_costs (case_list);
	  balance_case_nodes (&case_list, NULL);
	  emit_case_nodes (index, case_list, default_label, index_type);
	  if (default_label)
	    emit_jump (default_label);
	}
      else
	{
	  /* Dispatch through a casesi pattern or an explicit jump
	     table.  */
	  rtx fallback_label = label_rtx (case_list->code_label);
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label, fallback_label))
	    {
	      bool ok;

	      /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
	      if (optimize_insn_for_speed_p ()
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = build_int_cst (index_type, 0);
		  range = maxval;
		}

	      ok = try_tablejump (index_type, index_expr, minval, range,
				  table_label, default_label);
	      gcc_assert (ok);
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = XALLOCAVEC (rtx, ncases);
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->low, minval), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->high, minval), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  We may have gaps at
	     the beginning if we tried to avoid the minval subtraction,
	     so substitute some label even if the default label was
	     deemed unreachable.  */
	  if (!default_label)
	    default_label = fallback_label;
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* Record no drop-through after the table.  */
	  emit_barrier ();
	}

      /* The dispatch insns emitted above currently sit at the end of
	 the chain; move them back so they follow START.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
  free_alloc_pool (case_node_pool);
}
2534
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.
   UNSIGNEDP is nonzero if the comparison is to be done unsigned; it is
   forwarded to do_compare_rtx_and_jump along with an unknown branch
   probability (-1).  */

static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
		  int unsignedp)
{
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
			   NULL_RTX, NULL_RTX, label, -1);
}
2544
2545/* Not all case values are encountered equally.  This function
2546   uses a heuristic to weight case labels, in cases where that
2547   looks like a reasonable thing to do.
2548
2549   Right now, all we try to guess is text, and we establish the
2550   following weights:
2551
2552	chars above space:	16
2553	digits:			16
2554	default:		12
2555	space, punct:		8
2556	tab:			4
2557	newline:		2
2558	other "\" chars:	1
2559	remaining chars:	0
2560
2561   If we find any cases in the switch that are not either -1 or in the range
2562   of valid ASCII characters, or are control characters other than those
2563   commonly used with "\", don't treat this switch scanning text.
2564
2565   Return 1 if these nodes are suitable for cost estimation, otherwise
2566   return 0.  */
2567
2568static int
2569estimate_case_costs (case_node_ptr node)
2570{
2571  tree min_ascii = integer_minus_one_node;
2572  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2573  case_node_ptr n;
2574  int i;
2575
2576  /* If we haven't already made the cost table, make it now.  Note that the
2577     lower bound of the table is -1, not zero.  */
2578
2579  if (! cost_table_initialized)
2580    {
2581      cost_table_initialized = 1;
2582
2583      for (i = 0; i < 128; i++)
2584	{
2585	  if (ISALNUM (i))
2586	    COST_TABLE (i) = 16;
2587	  else if (ISPUNCT (i))
2588	    COST_TABLE (i) = 8;
2589	  else if (ISCNTRL (i))
2590	    COST_TABLE (i) = -1;
2591	}
2592
2593      COST_TABLE (' ') = 8;
2594      COST_TABLE ('\t') = 4;
2595      COST_TABLE ('\0') = 4;
2596      COST_TABLE ('\n') = 2;
2597      COST_TABLE ('\f') = 1;
2598      COST_TABLE ('\v') = 1;
2599      COST_TABLE ('\b') = 1;
2600    }
2601
2602  /* See if all the case expressions look like text.  It is text if the
2603     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
2604     as signed arithmetic since we don't want to ever access cost_table with a
2605     value less than -1.  Also check that none of the constants in a range
2606     are strange control characters.  */
2607
2608  for (n = node; n; n = n->right)
2609    {
2610      if (tree_int_cst_lt (n->low, min_ascii)
2611	  || tree_int_cst_lt (max_ascii, n->high))
2612	return 0;
2613
2614      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2615	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2616	if (COST_TABLE (i) < 0)
2617	  return 0;
2618    }
2619
2620  /* All interesting values are within the range of interesting
2621     ASCII characters.  */
2622  return 1;
2623}
2624
/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.

   HEAD points at the list head (linked through the `right' fields);
   PARENT becomes the `parent' of the subtree's root.  When the
   file-level flag use_cost_table is set, nodes are weighted by the
   COST_TABLE entries of their values instead of counted evenly.  */

static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* NPP now points at the pivot.  Detach it: everything before
	     it (headed by LEFT) becomes its left subtree, everything
	     after it stays as its right subtree.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
2741
2742/* Search the parent sections of the case node tree
2743   to see if a test for the lower bound of NODE would be redundant.
2744   INDEX_TYPE is the type of the index expression.
2745
2746   The instructions to generate the case decision tree are
2747   output in the same order as nodes are processed so it is
2748   known that if a parent node checks the range of the current
2749   node minus one that the current node is bounded at its lower
2750   span.  Thus the test would be redundant.  */
2751
2752static int
2753node_has_low_bound (case_node_ptr node, tree index_type)
2754{
2755  tree low_minus_one;
2756  case_node_ptr pnode;
2757
2758  /* If the lower bound of this node is the lowest value in the index type,
2759     we need not test it.  */
2760
2761  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2762    return 1;
2763
2764  /* If this node has a left branch, the value at the left must be less
2765     than that at this node, so it cannot be bounded at the bottom and
2766     we need not bother testing any further.  */
2767
2768  if (node->left)
2769    return 0;
2770
2771  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2772			       node->low,
2773			       build_int_cst (TREE_TYPE (node->low), 1));
2774
2775  /* If the subtraction above overflowed, we can't verify anything.
2776     Otherwise, look for a parent that tests our value - 1.  */
2777
2778  if (! tree_int_cst_lt (low_minus_one, node->low))
2779    return 0;
2780
2781  for (pnode = node->parent; pnode; pnode = pnode->parent)
2782    if (tree_int_cst_equal (low_minus_one, pnode->high))
2783      return 1;
2784
2785  return 0;
2786}
2787
2788/* Search the parent sections of the case node tree
2789   to see if a test for the upper bound of NODE would be redundant.
2790   INDEX_TYPE is the type of the index expression.
2791
2792   The instructions to generate the case decision tree are
2793   output in the same order as nodes are processed so it is
2794   known that if a parent node checks the range of the current
2795   node plus one that the current node is bounded at its upper
2796   span.  Thus the test would be redundant.  */
2797
2798static int
2799node_has_high_bound (case_node_ptr node, tree index_type)
2800{
2801  tree high_plus_one;
2802  case_node_ptr pnode;
2803
2804  /* If there is no upper bound, obviously no test is needed.  */
2805
2806  if (TYPE_MAX_VALUE (index_type) == NULL)
2807    return 1;
2808
2809  /* If the upper bound of this node is the highest value in the type
2810     of the index expression, we need not test against it.  */
2811
2812  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2813    return 1;
2814
2815  /* If this node has a right branch, the value at the right must be greater
2816     than that at this node, so it cannot be bounded at the top and
2817     we need not bother testing any further.  */
2818
2819  if (node->right)
2820    return 0;
2821
2822  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2823			       node->high,
2824			       build_int_cst (TREE_TYPE (node->high), 1));
2825
2826  /* If the addition above overflowed, we can't verify anything.
2827     Otherwise, look for a parent that tests our value + 1.  */
2828
2829  if (! tree_int_cst_lt (node->high, high_plus_one))
2830    return 0;
2831
2832  for (pnode = node->parent; pnode; pnode = pnode->parent)
2833    if (tree_int_cst_equal (high_plus_one, pnode->low))
2834      return 1;
2835
2836  return 0;
2837}
2838
2839/* Search the parent sections of the
2840   case node tree to see if both tests for the upper and lower
2841   bounds of NODE would be redundant.  */
2842
2843static int
2844node_is_bounded (case_node_ptr node, tree index_type)
2845{
2846  return (node_has_low_bound (node, index_type)
2847	  && node_has_high_bound (node, index_type));
2848}
2849
2850/* Emit step-by-step code to select a case for the value of INDEX.
2851   The thus generated decision tree follows the form of the
2852   case-node binary tree NODE, whose nodes represent test conditions.
2853   INDEX_TYPE is the type of the index of the switch.
2854
2855   Care is taken to prune redundant tests from the decision tree
2856   by detecting any boundary conditions already checked by
2857   emitted rtx.  (See node_has_high_bound, node_has_low_bound
2858   and node_is_bounded, above.)
2859
2860   Where the test conditions can be shown to be redundant we emit
2861   an unconditional jump to the target code.  As a further
2862   optimization, the subordinates of a tree node are examined to
2863   check for bounded nodes.  In this case conditional and/or
2864   unconditional jumps as a result of the boundary check for the
2865   current node are arranged to target the subordinates associated
2866   code for out of bound conditions on the current node.
2867
2868   We can assume that when control reaches the code generated here,
2869   the index value has already been compared with the parents
2870   of this node, and determined to be on the same side of each parent
2871   as this node is.  Thus, if this node tests for the value 51,
2872   and a parent tested for 52, we don't need to consider
2873   the possibility of a value greater than 51.  If another parent
2874   tests for the value 50, then this node need not test anything.  */
2875
static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  /* MODE is the mode of the INDEX rtx itself; IMODE the mode implied
     by the index's tree type.  They can differ, so case values are
     converted from IMODE to MODE before each comparison.  */
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* NOTE(review): DEFAULT_LABEL is guarded for NULL before the
     unconditional emit_jump calls below, so callers apparently may pass
     NULL when the default case is unreachable; several
     emit_cmp_and_jump_insns calls still pass it unchecked -- presumably
     those paths are only reached when a default label exists.  Verify
     against callers.  */

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (mode, index,
			convert_modes (mode, imode,
				       expand_normal (node->low),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Both children are single-valued leaves, so two equality
		 tests cover all remaining possibilities; no ordered
		 comparison to distinguish the sides is needed.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->right->low),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->left->low),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (CURR_INSN_LOCATION,
			      LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      if (default_label)
	        emit_jump (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if the right child
	     does not have any children and is single valued; it would
	     cost too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      /* Values below this node's value (== node->high, since the
		 node is single valued) go to default, unless a parent
		 test already ruled them out.  */
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->right->low),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      /* Values above this node's value go to default, unless a
		 parent test already ruled them out.  */
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->left->low),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_normal (node->high),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (CURR_INSN_LOCATION,
				       LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      if (default_label)
		emit_jump (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->high),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  The comparison is forced
		 unsigned (last-but-one argument 1 below) so that an index
		 below LOW wraps around to a huge value and also jumps to
		 the default label.  */
	      low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
						    high, low),
				       NULL_RTX, mode, EXPAND_NORMAL);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}
3248