1/* Expands front end tree to back end RTL for GCC
2   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
3   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4   Free Software Foundation, Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to the Free
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA.  */
22
23/* This file handles the generation of rtl code from tree structure
24   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
25   The functions whose names start with `expand_' are called by the
26   expander to generate RTL instructions for various kinds of constructs.  */
27
28#include "config.h"
29#include "system.h"
30#include "coretypes.h"
31#include "tm.h"
32
33#include "rtl.h"
34#include "hard-reg-set.h"
35#include "tree.h"
36#include "tm_p.h"
37#include "flags.h"
38#include "except.h"
39#include "function.h"
40#include "insn-config.h"
41#include "expr.h"
42#include "libfuncs.h"
43#include "recog.h"
44#include "machmode.h"
45#include "toplev.h"
46#include "output.h"
47#include "ggc.h"
48#include "langhooks.h"
49#include "predict.h"
50#include "optabs.h"
51#include "target.h"
52#include "regs.h"
53
54/* Functions and data structures for expanding case statements.  */
55
56/* Case label structure, used to hold info on labels within case
57   statements.  We handle "range" labels; for a single-value label
58   as in C, the high and low limits are the same.
59
60   We start with a vector of case nodes sorted in ascending order, and
61   the default label as the last element in the vector.  Before expanding
62   to RTL, we transform this vector into a list linked via the RIGHT
63   fields in the case_node struct.  Nodes with higher case values are
64   later in the list.
65
66   Switch statements can be output in three forms.  A branch table is
67   used if there are more than a few labels and the labels are dense
68   within the range between the smallest and largest case value.  If a
69   branch table is used, no further manipulations are done with the case
70   node chain.
71
72   The alternative to the use of a branch table is to generate a series
73   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
74   and PARENT fields to hold a binary tree.  Initially the tree is
75   totally unbalanced, with everything on the right.  We balance the tree
76   with nodes on the left having lower case values than the parent
77   and nodes on the right having higher values.  We then output the tree
78   in order.
79
80   For very small, suitable switch statements, we can generate a series
81   of simple bit test and branches instead.  */
82
/* One case label (or case range) of a switch statement.  GTY-marked
   so the garbage collector traces the tree pointers inside.  */
struct case_node GTY(())
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
};

/* Conventional shorthands used throughout this file.  */
typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
95
/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
/* Nonzero if the cost table should be consulted when expanding a switch.  */
static int use_cost_table;
/* Nonzero once cost_table_ has been filled in.  */
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]

/* Forward declarations of the file-local helper functions below.  */
static int n_occurrences (int, const char *);
static bool tree_conflicts_with_clobbers_p (tree, HARD_REG_SET *);
static void expand_nl_goto_receiver (void);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (void);
static void expand_value_return (rtx);
static int estimate_case_costs (case_node_ptr);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *add_case_node (struct case_node *, tree,
					tree, tree, tree);
126
127
128/* Return the rtx-label that corresponds to a LABEL_DECL,
129   creating it if necessary.  */
130
131rtx
132label_rtx (tree label)
133{
134  gcc_assert (TREE_CODE (label) == LABEL_DECL);
135
136  if (!DECL_RTL_SET_P (label))
137    {
138      rtx r = gen_label_rtx ();
139/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
140      unsigned align = DECL_ALIGN_UNIT (label);
141      int align_log2 = exact_log2 (align);
142
143/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
144      SET_DECL_RTL (label, r);
145      if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
146	LABEL_PRESERVE_P (r) = 1;
147/* APPLE LOCAL begin for-fsf-4_4 3274130 5295549 */ \
148
149      if (align_log2 >= 0 && align_log2 <= 0xFF)
150	SET_LABEL_ALIGN (r, align_log2, align - 1);
151/* APPLE LOCAL end for-fsf-4_4 3274130 5295549 */ \
152    }
153
154  return DECL_RTL (label);
155}
156
157/* As above, but also put it on the forced-reference list of the
158   function that contains it.  */
159rtx
160force_label_rtx (tree label)
161{
162  rtx ref = label_rtx (label);
163  tree function = decl_function_context (label);
164  struct function *p;
165
166  gcc_assert (function);
167
168  if (function != current_function_decl)
169    p = find_function_data (function);
170  else
171    p = cfun;
172
173  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
174						p->expr->x_forced_labels);
175  return ref;
176}
177
/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  /* Flush any deferred stack adjustment first, so the jump is not taken
     with the stack pointer in a transient state.  */
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  /* A barrier marks that control cannot fall through past the jump.  */
  emit_barrier ();
}
187
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  /* The expanded value may not be in the pointer mode the target
     expects for jump addresses; coerce it to Pmode.  */
  x = convert_memory_address (Pmode, x);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
201
/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   APPLE LOCAL begin for-fsf-4_4 3274130 5295549
   This is used for those labels created by the front-end that survive
   through CFG generation, including all user labels.  (Some labels
   are removed by cleanup_dead_labels in tree-cfg.c.)

   APPLE LOCAL end for-fsf-4_4 3274130 5295549
   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  rtx label_r = label_rtx (label);

  do_pending_stack_adjust ();
  emit_label (label_r);
  /* Carry the source-level name over to the CODE_LABEL, if any.  */
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  /* A nonlocal label needs receiver code (emitted here) and must be
     recorded so nonlocal gotos can find their handler.  */
  if (DECL_NONLOCAL (label))
    {
      expand_nl_goto_receiver ();
      nonlocal_goto_handler_labels
	= gen_rtx_EXPR_LIST (VOIDmode, label_r,
			     nonlocal_goto_handler_labels);
    }

  /* Address-taken labels must be preserved even if apparently unused.  */
  if (FORCED_LABEL (label))
    forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);

  if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
    maybe_set_first_label_num (label_r);
}
240
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  gcc_assert (!context || context == current_function_decl);
#endif

  /* A local goto is just an unconditional jump to the label's rtx.  */
  emit_jump (label_rtx (label));
}
257
/* Count how many times the character C appears in the
   NUL-terminated string S.  */
static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;
  return count;
}
267
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm (tree string, int vol)
{
  rtx body;

  /* The front end may wrap the string in an ADDR_EXPR; strip it to
     reach the STRING_CST itself.  */
  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode,
			    ggc_strdup (TREE_STRING_POINTER (string)));

  /* Record volatility on the ASM_INPUT so optimizers leave it alone.  */
  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);
}
288
/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks %<=%>");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning (0, "output constraint %qc for operand %d "
		 "is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string, classifying each constraint
     letter.  CONSTRAINT_LEN copes with multi-character constraints.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned "
	       "%<+%> or %<=%>");
	return false;

      case '%':
	/* `%' (commutative) makes no sense on the last operand.  */
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	/* Modifiers and constant constraints place no register/memory
	   requirement on the operand itself.  */
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	/* Machine-specific constraint letters; classify via the
	   target's constraint machinery.  */
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
434
/* Similar, but for input constraints.  On success *ALLOWS_MEM and
   *ALLOWS_REG say whether the INPUT_NUMth input may live in memory or
   a register.  A matching-digit constraint may replace *CONSTRAINT_P
   with the constraint of the matched output operand.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	/* These are only errors in the original constraint, not in one
	   we were redirected to by a matching digit.  */
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains %qc", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("%<%%%> constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      /* Restart the scan on the matched output's constraint.  */
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation %qc in constraint", constraint[j]);
	    return false;
	  }
	/* Machine-specific constraint letters; classify via the
	   target's constraint machinery.  */
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning (0, "matching constraint does not allow a register");

  return true;
}
570
571/* Return DECL iff there's an overlap between *REGS and DECL, where DECL
572   can be an asm-declared register.  Called via walk_tree.  */
573
574static tree
575decl_overlaps_hard_reg_set_p (tree *declp, int *walk_subtrees ATTRIBUTE_UNUSED,
576			      void *data)
577{
578  tree decl = *declp;
579  const HARD_REG_SET *regs = data;
580
581  if (TREE_CODE (decl) == VAR_DECL)
582    {
583      if (DECL_HARD_REGISTER (decl)
584	  && REG_P (DECL_RTL (decl))
585	  && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
586	{
587	  rtx reg = DECL_RTL (decl);
588	  unsigned int regno;
589
590	  for (regno = REGNO (reg);
591	       regno < (REGNO (reg)
592			+ hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
593	       regno++)
594	    if (TEST_HARD_REG_BIT (*regs, regno))
595	      return decl;
596	}
597      walk_subtrees = 0;
598    }
599  else if (TYPE_P (decl) || TREE_CODE (decl) == PARM_DECL)
600    walk_subtrees = 0;
601  return NULL_TREE;
602}
603
/* If there is an overlap between *REGS and DECL, return the first overlap
   found (walking all of DECL's subtrees), or NULL_TREE if none.  */
tree
tree_overlaps_hard_reg_set (tree decl, HARD_REG_SET *regs)
{
  return walk_tree (&decl, decl_overlaps_hard_reg_set_p, regs, NULL);
}
611
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true if an
   overlap is found; return false if T is ok.  (The old comment
   claiming a tree return value was stale; this returns bool.)  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("asm-specifier for variable %qs conflicts with asm clobber list",
	     IDENTIFIER_POINTER (DECL_NAME (overlap)));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
636
637/* Generate RTL for an asm statement with arguments.
638   STRING is the instruction template.
639   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
640   Each output or input has an expression in the TREE_VALUE and
641   and a tree list in TREE_PURPOSE which in turn contains a constraint
642   name in TREE_VALUE (or NULL_TREE) and a constraint string
643   in TREE_PURPOSE.
644   CLOBBERS is a list of STRING_CST nodes each naming a hard register
645   that is clobbered by this insn.
646
647   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
648   Some elements of OUTPUTS may be replaced with trees representing temporary
649   values.  The caller should copy those temporary values to the originally
650   specified lvalues.
651
652   VOL nonzero means the insn is volatile; don't optimize it.  */
653
654static void
655expand_asm_operands (tree string, tree outputs, tree inputs,
656		     tree clobbers, int vol, location_t locus)
657{
658  rtvec argvec, constraintvec;
659  rtx body;
660  int ninputs = list_length (inputs);
661  int noutputs = list_length (outputs);
662  int ninout;
663  int nclobbers;
664  HARD_REG_SET clobbered_regs;
665  int clobber_conflict_found = 0;
666  tree tail;
667  tree t;
668  int i;
669  /* Vector of RTX's of evaluated output operands.  */
670  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
671  int *inout_opnum = alloca (noutputs * sizeof (int));
672  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
673  enum machine_mode *inout_mode
674    = alloca (noutputs * sizeof (enum machine_mode));
675  const char **constraints
676    = alloca ((noutputs + ninputs) * sizeof (const char *));
677  int old_generating_concat_p = generating_concat_p;
678
679  /* An ASM with no outputs needs to be treated as volatile, for now.  */
680  if (noutputs == 0)
681    vol = 1;
682
683  if (! check_operand_nalternatives (outputs, inputs))
684    return;
685
686  string = resolve_asm_operand_names (string, outputs, inputs);
687
688  /* Collect constraints.  */
689  i = 0;
690  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
691    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
692  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
693    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
694
695  /* Sometimes we wish to automatically clobber registers across an asm.
696     Case in point is when the i386 backend moved from cc0 to a hard reg --
697     maintaining source-level compatibility means automatically clobbering
698     the flags register.  */
699  clobbers = targetm.md_asm_clobbers (outputs, inputs, clobbers);
700
701  /* Count the number of meaningful clobbered registers, ignoring what
702     we would ignore later.  */
703  nclobbers = 0;
704  CLEAR_HARD_REG_SET (clobbered_regs);
705  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
706    {
707      const char *regname;
708
709      if (TREE_VALUE (tail) == error_mark_node)
710	return;
711      regname = TREE_STRING_POINTER (TREE_VALUE (tail));
712
713      i = decode_reg_name (regname);
714      if (i >= 0 || i == -4)
715	++nclobbers;
716      else if (i == -2)
717	error ("unknown register name %qs in %<asm%>", regname);
718
719      /* Mark clobbered registers.  */
720      if (i >= 0)
721        {
722	  /* Clobbering the PIC register is an error.  */
723	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
724	    {
725	      error ("PIC register %qs clobbered in %<asm%>", regname);
726	      return;
727	    }
728
729	  SET_HARD_REG_BIT (clobbered_regs, i);
730	}
731    }
732
733  /* First pass over inputs and outputs checks validity and sets
734     mark_addressable if needed.  */
735
736  ninout = 0;
737  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
738    {
739      tree val = TREE_VALUE (tail);
740      tree type = TREE_TYPE (val);
741      const char *constraint;
742      bool is_inout;
743      bool allows_reg;
744      bool allows_mem;
745
746      /* If there's an erroneous arg, emit no insn.  */
747      if (type == error_mark_node)
748	return;
749
750      /* Try to parse the output constraint.  If that fails, there's
751	 no point in going further.  */
752      constraint = constraints[i];
753      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
754				    &allows_mem, &allows_reg, &is_inout))
755	return;
756
757      if (! allows_reg
758	  && (allows_mem
759	      || is_inout
760	      || (DECL_P (val)
761		  && REG_P (DECL_RTL (val))
762		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
763	lang_hooks.mark_addressable (val);
764
765      if (is_inout)
766	ninout++;
767    }
768
769  ninputs += ninout;
770  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
771    {
772      error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
773      return;
774    }
775
776  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
777    {
778      bool allows_reg, allows_mem;
779      const char *constraint;
780
781      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
782	 would get VOIDmode and that could cause a crash in reload.  */
783      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
784	return;
785
786      constraint = constraints[i + noutputs];
787      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
788				    constraints, &allows_mem, &allows_reg))
789	return;
790
791      if (! allows_reg && allows_mem)
792	lang_hooks.mark_addressable (TREE_VALUE (tail));
793    }
794
795  /* Second pass evaluates arguments.  */
796
797  ninout = 0;
798  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
799    {
800      tree val = TREE_VALUE (tail);
801      tree type = TREE_TYPE (val);
802      bool is_inout;
803      bool allows_reg;
804      bool allows_mem;
805      rtx op;
806      bool ok;
807
808      ok = parse_output_constraint (&constraints[i], i, ninputs,
809				    noutputs, &allows_mem, &allows_reg,
810				    &is_inout);
811      gcc_assert (ok);
812
813      /* If an output operand is not a decl or indirect ref and our constraint
814	 allows a register, make a temporary to act as an intermediate.
815	 Make the asm insn write into that, then our caller will copy it to
816	 the real output operand.  Likewise for promoted variables.  */
817
818      generating_concat_p = 0;
819
820      real_output_rtx[i] = NULL_RTX;
821      if ((TREE_CODE (val) == INDIRECT_REF
822	   && allows_mem)
823	  || (DECL_P (val)
824	      && (allows_mem || REG_P (DECL_RTL (val)))
825	      && ! (REG_P (DECL_RTL (val))
826		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
827	  || ! allows_reg
828	  || is_inout)
829	{
830	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
831	  if (MEM_P (op))
832	    op = validize_mem (op);
833
834	  if (! allows_reg && !MEM_P (op))
835	    error ("output number %d not directly addressable", i);
836	  if ((! allows_mem && MEM_P (op))
837	      || GET_CODE (op) == CONCAT)
838	    {
839	      real_output_rtx[i] = op;
840	      op = gen_reg_rtx (GET_MODE (op));
841	      if (is_inout)
842		emit_move_insn (op, real_output_rtx[i]);
843	    }
844	}
845      else
846	{
847	  op = assign_temp (type, 0, 0, 1);
848	  op = validize_mem (op);
849	  TREE_VALUE (tail) = make_tree (type, op);
850	}
851      output_rtx[i] = op;
852
853      generating_concat_p = old_generating_concat_p;
854
855      if (is_inout)
856	{
857	  inout_mode[ninout] = TYPE_MODE (type);
858	  inout_opnum[ninout++] = i;
859	}
860
861      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
862	clobber_conflict_found = 1;
863    }
864
865  /* Make vectors for the expression-rtx, constraint strings,
866     and named operands.  */
867
868  argvec = rtvec_alloc (ninputs);
869  constraintvec = rtvec_alloc (ninputs);
870
871  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
872				: GET_MODE (output_rtx[0])),
873			       ggc_strdup (TREE_STRING_POINTER (string)),
874			       empty_string, 0, argvec, constraintvec,
875			       locus);
876
877  MEM_VOLATILE_P (body) = vol;
878
879  /* Eval the inputs and put them into ARGVEC.
880     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
881
882  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
883    {
884      bool allows_reg, allows_mem;
885      const char *constraint;
886      tree val, type;
887      rtx op;
888      bool ok;
889
890      constraint = constraints[i + noutputs];
891      ok = parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
892				   constraints, &allows_mem, &allows_reg);
893      gcc_assert (ok);
894
895      generating_concat_p = 0;
896
897      val = TREE_VALUE (tail);
898      type = TREE_TYPE (val);
899      /* EXPAND_INITIALIZER will not generate code for valid initializer
900	 constants, but will still generate code for other types of operand.
901	 This is the behavior we want for constant constraints.  */
902      op = expand_expr (val, NULL_RTX, VOIDmode,
903			allows_reg ? EXPAND_NORMAL
904			: allows_mem ? EXPAND_MEMORY
905			: EXPAND_INITIALIZER);
906
907      /* Never pass a CONCAT to an ASM.  */
908      if (GET_CODE (op) == CONCAT)
909	op = force_reg (GET_MODE (op), op);
910      else if (MEM_P (op))
911	op = validize_mem (op);
912
913      if (asm_operand_ok (op, constraint) <= 0)
914	{
915	  if (allows_reg && TYPE_MODE (type) != BLKmode)
916	    op = force_reg (TYPE_MODE (type), op);
917	  else if (!allows_mem)
918	    warning (0, "asm operand %d probably doesn%'t match constraints",
919		     i + noutputs);
920	  else if (MEM_P (op))
921	    {
922	      /* We won't recognize either volatile memory or memory
923		 with a queued address as available a memory_operand
924		 at this point.  Ignore it: clearly this *is* a memory.  */
925	    }
926	  else
927	    {
928	      warning (0, "use of memory input without lvalue in "
929		       "asm operand %d is deprecated", i + noutputs);
930
931	      if (CONSTANT_P (op))
932		{
933		  rtx mem = force_const_mem (TYPE_MODE (type), op);
934		  if (mem)
935		    op = validize_mem (mem);
936		  else
937		    op = force_reg (TYPE_MODE (type), op);
938		}
939	      if (REG_P (op)
940		  || GET_CODE (op) == SUBREG
941		  || GET_CODE (op) == CONCAT)
942		{
943		  tree qual_type = build_qualified_type (type,
944							 (TYPE_QUALS (type)
945							  | TYPE_QUAL_CONST));
946		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
947		  memloc = validize_mem (memloc);
948		  emit_move_insn (memloc, op);
949		  op = memloc;
950		}
951	    }
952	}
953
954      generating_concat_p = old_generating_concat_p;
955      ASM_OPERANDS_INPUT (body, i) = op;
956
957      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
958	= gen_rtx_ASM_INPUT (TYPE_MODE (type),
959			     ggc_strdup (constraints[i + noutputs]));
960
961      if (tree_conflicts_with_clobbers_p (val, &clobbered_regs))
962	clobber_conflict_found = 1;
963    }
964
965  /* Protect all the operands from the queue now that they have all been
966     evaluated.  */
967
968  generating_concat_p = 0;
969
970  /* For in-out operands, copy output rtx to input rtx.  */
971  for (i = 0; i < ninout; i++)
972    {
973      int j = inout_opnum[i];
974      char buffer[16];
975
976      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
977	= output_rtx[j];
978
979      sprintf (buffer, "%d", j);
980      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
981	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
982    }
983
984  generating_concat_p = old_generating_concat_p;
985
986  /* Now, for each output, construct an rtx
987     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
988			       ARGVEC CONSTRAINTS OPNAMES))
989     If there is more than one, put them inside a PARALLEL.  */
990
991  if (noutputs == 1 && nclobbers == 0)
992    {
993      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = ggc_strdup (constraints[0]);
994      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
995    }
996
997  else if (noutputs == 0 && nclobbers == 0)
998    {
999      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
1000      emit_insn (body);
1001    }
1002
1003  else
1004    {
1005      rtx obody = body;
1006      int num = noutputs;
1007
1008      if (num == 0)
1009	num = 1;
1010
1011      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1012
1013      /* For each output operand, store a SET.  */
1014      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1015	{
1016	  XVECEXP (body, 0, i)
1017	    = gen_rtx_SET (VOIDmode,
1018			   output_rtx[i],
1019			   gen_rtx_ASM_OPERANDS
1020			   (GET_MODE (output_rtx[i]),
1021			    ggc_strdup (TREE_STRING_POINTER (string)),
1022			    ggc_strdup (constraints[i]),
1023			    i, argvec, constraintvec, locus));
1024
1025	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1026	}
1027
1028      /* If there are no outputs (but there are some clobbers)
1029	 store the bare ASM_OPERANDS into the PARALLEL.  */
1030
1031      if (i == 0)
1032	XVECEXP (body, 0, i++) = obody;
1033
1034      /* Store (clobber REG) for each clobbered register specified.  */
1035
1036      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1037	{
1038	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1039	  int j = decode_reg_name (regname);
1040	  rtx clobbered_reg;
1041
1042	  if (j < 0)
1043	    {
1044	      if (j == -3)	/* `cc', which is not a register */
1045		continue;
1046
1047	      if (j == -4)	/* `memory', don't cache memory across asm */
1048		{
1049		  XVECEXP (body, 0, i++)
1050		    = gen_rtx_CLOBBER (VOIDmode,
1051				       gen_rtx_MEM
1052				       (BLKmode,
1053					gen_rtx_SCRATCH (VOIDmode)));
1054		  continue;
1055		}
1056
1057	      /* Ignore unknown register, error already signaled.  */
1058	      continue;
1059	    }
1060
1061	  /* Use QImode since that's guaranteed to clobber just one reg.  */
1062	  clobbered_reg = gen_rtx_REG (QImode, j);
1063
1064	  /* Do sanity check for overlap between clobbers and respectively
1065	     input and outputs that hasn't been handled.  Such overlap
1066	     should have been detected and reported above.  */
1067	  if (!clobber_conflict_found)
1068	    {
1069	      int opno;
1070
1071	      /* We test the old body (obody) contents to avoid tripping
1072		 over the under-construction body.  */
1073	      for (opno = 0; opno < noutputs; opno++)
1074		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1075		  internal_error ("asm clobber conflict with output operand");
1076
1077	      for (opno = 0; opno < ninputs - ninout; opno++)
1078		if (reg_overlap_mentioned_p (clobbered_reg,
1079					     ASM_OPERANDS_INPUT (obody, opno)))
1080		  internal_error ("asm clobber conflict with input operand");
1081	    }
1082
1083	  XVECEXP (body, 0, i++)
1084	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1085	}
1086
1087      emit_insn (body);
1088    }
1089
1090  /* For any outputs that needed reloading into registers, spill them
1091     back to where they belong.  */
1092  for (i = 0; i < noutputs; ++i)
1093    if (real_output_rtx[i])
1094      emit_move_insn (real_output_rtx[i], output_rtx[i]);
1095
1096  free_temp_slots ();
1097}
1098
1099void
1100expand_asm_expr (tree exp)
1101{
1102  int noutputs, i;
1103  tree outputs, tail;
1104  tree *o;
1105
1106  if (ASM_INPUT_P (exp))
1107    {
1108      expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1109      return;
1110    }
1111
1112  outputs = ASM_OUTPUTS (exp);
1113  noutputs = list_length (outputs);
1114  /* o[I] is the place that output number I should be written.  */
1115  o = (tree *) alloca (noutputs * sizeof (tree));
1116
1117  /* Record the contents of OUTPUTS before it is modified.  */
1118  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1119    o[i] = TREE_VALUE (tail);
1120
1121  /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1122     OUTPUTS some trees for where the values were actually stored.  */
1123  expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1124		       ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1125		       input_location);
1126
1127  /* Copy all the intermediate outputs into the specified outputs.  */
1128  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1129    {
1130      if (o[i] != TREE_VALUE (tail))
1131	{
1132	  expand_assignment (o[i], TREE_VALUE (tail));
1133	  free_temp_slots ();
1134
1135	  /* Restore the original value so that it's correct the next
1136	     time we expand this function.  */
1137	  TREE_VALUE (tail) = o[i];
1138	}
1139    }
1140}
1141
1142/* A subroutine of expand_asm_operands.  Check that all operands have
1143   the same number of alternatives.  Return true if so.  */
1144
1145static bool
1146check_operand_nalternatives (tree outputs, tree inputs)
1147{
1148  if (outputs || inputs)
1149    {
1150      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1151      int nalternatives
1152	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1153      tree next = inputs;
1154
1155      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1156	{
1157	  error ("too many alternatives in %<asm%>");
1158	  return false;
1159	}
1160
1161      tmp = outputs;
1162      while (tmp)
1163	{
1164	  const char *constraint
1165	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1166
1167	  if (n_occurrences (',', constraint) != nalternatives)
1168	    {
1169	      error ("operand constraints for %<asm%> differ "
1170		     "in number of alternatives");
1171	      return false;
1172	    }
1173
1174	  if (TREE_CHAIN (tmp))
1175	    tmp = TREE_CHAIN (tmp);
1176	  else
1177	    tmp = next, next = 0;
1178	}
1179    }
1180
1181  return true;
1182}
1183
1184/* A subroutine of expand_asm_operands.  Check that all operand names
1185   are unique.  Return true if so.  We rely on the fact that these names
1186   are identifiers, and so have been canonicalized by get_identifier,
1187   so all we need are pointer comparisons.  */
1188
1189static bool
1190check_unique_operand_names (tree outputs, tree inputs)
1191{
1192  tree i, j;
1193
1194  for (i = outputs; i ; i = TREE_CHAIN (i))
1195    {
1196      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1197      if (! i_name)
1198	continue;
1199
1200      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1201	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1202	  goto failure;
1203    }
1204
1205  for (i = inputs; i ; i = TREE_CHAIN (i))
1206    {
1207      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1208      if (! i_name)
1209	continue;
1210
1211      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1212	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1213	  goto failure;
1214      for (j = outputs; j ; j = TREE_CHAIN (j))
1215	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1216	  goto failure;
1217    }
1218
1219  return true;
1220
1221 failure:
1222  error ("duplicate asm operand name %qs",
1223	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1224  return false;
1225}
1226
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.  Returns STRING itself
   when the template needs no substitution, otherwise a freshly built
   STRING_CST with each [<name>] replaced by its operand number.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  /* Diagnose duplicate names up front; resolution below proceeds
     regardless, using whatever match is found first.  */
  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  /* Work on a writable copy: resolve_operand_name_1 edits the
	     buffer in place and only ever shrinks it.  */
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  A named
     reference looks like %[name], or %X[name] for a modifier letter X.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  /* NOTE(review): this advances by one char, so the second '%'
	     of "%%[" is itself treated as starting a named reference --
	     confirm that is the intended handling of quoted percents.  */
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      /* Resume scanning at the position found above, now inside BUFFER.  */
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  /* P now points at the '['; substitute and continue after it.  */
	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
1302
/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      /* No closing brace: skip the rest of the string entirely.  */
      return strchr (p, '\0');
    }
  /* Length of the name between the brackets.  */
  len = q - p - 1;

  /* Resolve the name to a number.  Outputs are numbered first, then
     inputs, matching the operand numbering used in the asm template.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  /* Unknown name: report it (terminating at the ']' so the name prints
     cleanly), then substitute operand 0 so parsing can continue.  */
  *q = '\0';
  error ("undefined named operand %qs", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  gcc_assert (p <= q);

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}
1366
1367/* Generate RTL to evaluate the expression EXP.  */
1368
1369void
1370expand_expr_stmt (tree exp)
1371{
1372  rtx value;
1373  tree type;
1374
1375  value = expand_expr (exp, const0_rtx, VOIDmode, 0);
1376  type = TREE_TYPE (exp);
1377
1378  /* If all we do is reference a volatile value in memory,
1379     copy it to a register to be sure it is actually touched.  */
1380  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
1381    {
1382      if (TYPE_MODE (type) == VOIDmode)
1383	;
1384      else if (TYPE_MODE (type) != BLKmode)
1385	value = copy_to_reg (value);
1386      else
1387	{
1388	  rtx lab = gen_label_rtx ();
1389
1390	  /* Compare the value with itself to reference it.  */
1391	  emit_cmp_and_jump_insns (value, value, EQ,
1392				   expand_normal (TYPE_SIZE (type)),
1393				   BLKmode, 0, lab);
1394	  emit_label (lab);
1395	}
1396    }
1397
1398  /* Free any temporaries used to evaluate this expression.  */
1399  free_temp_slots ();
1400}
1401
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression.  */

int
warn_if_unused_value (tree exp, location_t locus)
{
 restart:
  /* A node already marked used, or explicitly marked as not warnable,
     needs no diagnostic.  */
  if (TREE_USED (exp) || TREE_NO_WARNING (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Prefer the expression's own location over the caller-supplied one.  */
  if (EXPR_HAS_LOCATION (exp))
    locus = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    /* These nodes all imply a side effect, so an unused value is fine.  */
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
    case VA_ARG_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
      /* Look through SAVE_EXPR to the saved expression.  */
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COND_EXPR:
      /* If this is an expression with side effects, don't warn; this
	 case commonly appears in macro expansions.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;
      goto warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp) || REFERENCE_CLASS_P (exp))
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (EXPRESSION_CLASS_P (exp) && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
	return 0;

    warn:
      warning (0, "%Hvalue computed is not used", &locus);
      return 1;
    }
}
1496
1497
/* Generate RTL to return from the current function with no value.
   (That is, nothing is done about returning any value.)  */

void
expand_null_return (void)
{
  /* If the function was declared to return a value but none is being
     returned, clobber the return registers so stale contents are not
     considered live through the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
1511
1512/* Generate RTL to return directly from the current function.
1513   (That is, we bypass any return value.)  */
1514
1515void
1516expand_naked_return (void)
1517{
1518  rtx end_label;
1519
1520  clear_pending_stack_adjust ();
1521  do_pending_stack_adjust ();
1522
1523  end_label = naked_return_label;
1524  if (end_label == 0)
1525    end_label = naked_return_label = gen_label_rtx ();
1526
1527  emit_jump (end_label);
1528}
1529
1530/* Generate RTL to return from the current function, with value VAL.  */
1531
1532static void
1533expand_value_return (rtx val)
1534{
1535  /* Copy the value to the return location
1536     unless it's already there.  */
1537
1538  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
1539  if (return_reg != val)
1540    {
1541      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
1542      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
1543      {
1544	int unsignedp = TYPE_UNSIGNED (type);
1545	enum machine_mode old_mode
1546	  = DECL_MODE (DECL_RESULT (current_function_decl));
1547	enum machine_mode mode
1548	  = promote_mode (type, old_mode, &unsignedp, 1);
1549
1550	if (mode != old_mode)
1551	  val = convert_modes (mode, old_mode, val, unsignedp);
1552      }
1553      if (GET_CODE (return_reg) == PARALLEL)
1554	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
1555      else
1556	emit_move_insn (return_reg, val);
1557    }
1558
1559  expand_null_return_1 ();
1560}
1561
1562/* Output a return with no value.  */
1563
1564static void
1565expand_null_return_1 (void)
1566{
1567  clear_pending_stack_adjust ();
1568  do_pending_stack_adjust ();
1569  emit_jump (return_label);
1570}
1571
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      /* Still evaluate RETVAL for any side effects it has.  */
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    /* `return <result> = <expr>': the value to return is the RHS.  */
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  */

  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_normal (retval_rhs);
      enum machine_mode tmpmode, result_reg_mode;

      /* An empty aggregate degenerates to a plain void return.  */
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least
	 significant end of the register, which translates to right padding
	 on little-endian targets and left padding on big-endian targets.
	 The opposite holds if the structure is returned at the most
	 significant end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode));
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* A suitable mode should have been found.  */
	  gcc_assert (tmpmode != VOIDmode);

	  PUT_MODE (result_rtl, tmpmode);
	}

      /* Assemble the word-sized pseudos into one result register wide
	 enough for the whole value.  */
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
         reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      /* The return register cannot hold a MEM; force VAL into a reg.  */
      val = force_not_mem (val);
      /* Return the calculated value.  */
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      expand_value_return (result_rtl);
    }
}
1749
1750/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
1751   in question represents the outermost pair of curly braces (i.e. the "body
1752   block") of a function or method.
1753
1754   For any BLOCK node representing a "body block" of a function or method, the
1755   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
1756   represents the outermost (function) scope for the function or method (i.e.
1757   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
1758   *that* node in turn will point to the relevant FUNCTION_DECL node.  */
1759
1760int
1761is_body_block (tree stmt)
1762{
1763  if (lang_hooks.no_body_blocks)
1764    return 0;
1765
1766  if (TREE_CODE (stmt) == BLOCK)
1767    {
1768      tree parent = BLOCK_SUPERCONTEXT (stmt);
1769
1770      if (parent && TREE_CODE (parent) == BLOCK)
1771	{
1772	  tree grandparent = BLOCK_SUPERCONTEXT (parent);
1773
1774	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
1775	    return 1;
1776	}
1777    }
1778
1779  return 0;
1780}
1781
/* Emit code to restore vital registers at the beginning of a nonlocal goto
   handler.  */
static void
expand_nl_goto_receiver (void)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  /* Let the target emit any additional receiver setup it requires.  */
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
1851
1852/* Generate RTL for the automatic variable declaration DECL.
1853   (Other kinds of declarations are simply ignored if seen here.)  */
1854
1855void
1856expand_decl (tree decl)
1857{
1858  tree type;
1859
1860  type = TREE_TYPE (decl);
1861
1862  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
1863     type in case this node is used in a reference.  */
1864  if (TREE_CODE (decl) == CONST_DECL)
1865    {
1866      DECL_MODE (decl) = TYPE_MODE (type);
1867      DECL_ALIGN (decl) = TYPE_ALIGN (type);
1868      DECL_SIZE (decl) = TYPE_SIZE (type);
1869      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
1870      return;
1871    }
1872
1873  /* Otherwise, only automatic variables need any expansion done.  Static and
1874     external variables, and external functions, will be handled by
1875     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
1876     nothing.  PARM_DECLs are handled in `assign_parms'.  */
1877  if (TREE_CODE (decl) != VAR_DECL)
1878    return;
1879
1880  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
1881    return;
1882
1883  /* Create the RTL representation for the variable.  */
1884
1885  if (type == error_mark_node)
1886    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
1887
1888  else if (DECL_SIZE (decl) == 0)
1889    /* Variable with incomplete type.  */
1890    {
1891      rtx x;
1892      if (DECL_INITIAL (decl) == 0)
1893	/* Error message was already done; now avoid a crash.  */
1894	x = gen_rtx_MEM (BLKmode, const0_rtx);
1895      else
1896	/* An initializer is going to decide the size of this array.
1897	   Until we know the size, represent its address with a reg.  */
1898	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
1899
1900      set_mem_attributes (x, decl, 1);
1901      SET_DECL_RTL (decl, x);
1902    }
1903  else if (use_register_for_decl (decl))
1904    {
1905      /* Automatic variable that can go in a register.  */
1906      int unsignedp = TYPE_UNSIGNED (type);
1907      enum machine_mode reg_mode
1908	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
1909
1910      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
1911
1912      /* Note if the object is a user variable.  */
1913      if (!DECL_ARTIFICIAL (decl))
1914	{
1915	  mark_user_reg (DECL_RTL (decl));
1916
1917	  /* Trust user variables which have a pointer type to really
1918	     be pointers.  Do not trust compiler generated temporaries
1919	     as our type system is totally busted as it relates to
1920	     pointer arithmetic which translates into lots of compiler
1921	     generated objects with pointer types, but which are not really
1922	     pointers.  */
1923	  if (POINTER_TYPE_P (type))
1924	    mark_reg_pointer (DECL_RTL (decl),
1925			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
1926	}
1927    }
1928
1929  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
1930	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
1931		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
1932					  STACK_CHECK_MAX_VAR_SIZE)))
1933    {
1934      /* Variable of fixed size that goes on the stack.  */
1935      rtx oldaddr = 0;
1936      rtx addr;
1937      rtx x;
1938
1939      /* If we previously made RTL for this decl, it must be an array
1940	 whose size was determined by the initializer.
1941	 The old address was a register; set that register now
1942	 to the proper address.  */
1943      if (DECL_RTL_SET_P (decl))
1944	{
1945	  gcc_assert (MEM_P (DECL_RTL (decl)));
1946	  gcc_assert (REG_P (XEXP (DECL_RTL (decl), 0)));
1947	  oldaddr = XEXP (DECL_RTL (decl), 0);
1948	}
1949
1950      /* Set alignment we actually gave this decl.  */
1951      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
1952			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
1953      DECL_USER_ALIGN (decl) = 0;
1954
1955      x = assign_temp (decl, 1, 1, 1);
1956      set_mem_attributes (x, decl, 1);
1957      SET_DECL_RTL (decl, x);
1958
1959      if (oldaddr)
1960	{
1961	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
1962	  if (addr != oldaddr)
1963	    emit_move_insn (oldaddr, addr);
1964	}
1965    }
1966  else
1967    /* Dynamic-size object: must push space on the stack.  */
1968    {
1969      rtx address, size, x;
1970
1971      /* Record the stack pointer on entry to block, if have
1972	 not already done so.  */
1973      do_pending_stack_adjust ();
1974
1975      /* Compute the variable's size, in bytes.  This will expand any
1976	 needed SAVE_EXPRs for the first time.  */
1977      size = expand_normal (DECL_SIZE_UNIT (decl));
1978      free_temp_slots ();
1979
1980      /* Allocate space on the stack for the variable.  Note that
1981	 DECL_ALIGN says how the variable is to be aligned and we
1982	 cannot use it to conclude anything about the alignment of
1983	 the size.  */
1984      address = allocate_dynamic_stack_space (size, NULL_RTX,
1985					      TYPE_ALIGN (TREE_TYPE (decl)));
1986
1987      /* Reference the variable indirect through that rtx.  */
1988      x = gen_rtx_MEM (DECL_MODE (decl), address);
1989      set_mem_attributes (x, decl, 1);
1990      SET_DECL_RTL (decl, x);
1991
1992
1993      /* Indicate the alignment we actually gave this variable.  */
1994#ifdef STACK_BOUNDARY
1995      DECL_ALIGN (decl) = STACK_BOUNDARY;
1996#else
1997      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
1998#endif
1999      DECL_USER_ALIGN (decl) = 0;
2000    }
2001}
2002
2003/* Emit code to save the current value of stack.  */
2004rtx
2005expand_stack_save (void)
2006{
2007  rtx ret = NULL_RTX;
2008
2009  do_pending_stack_adjust ();
2010  emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
2011  return ret;
2012}
2013
2014/* Emit code to restore the current value of stack.  */
2015void
2016expand_stack_restore (tree var)
2017{
2018  rtx sa = DECL_RTL (var);
2019
2020  emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
2021}
2022
2023/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
2024   DECL_ELTS is the list of elements that belong to DECL's type.
2025   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */
2026
void
expand_anon_union_decl (tree decl, tree cleanup ATTRIBUTE_UNUSED,
			tree decl_elts)
{
  rtx x;
  tree t;

  /* If any of the elements are addressable, so is the entire union.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
      {
	TREE_ADDRESSABLE (decl) = 1;
	break;
      }

  /* Expand the union itself; the members will share its storage.  */
  expand_decl (decl);
  x = DECL_RTL (decl);

  /* Go through the elements, assigning RTL to each.  */
  for (t = decl_elts; t; t = TREE_CHAIN (t))
    {
      tree decl_elt = TREE_VALUE (t);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
      rtx decl_rtl;

      /* If any of the elements are used, so is the entire union.  */
      if (TREE_USED (decl_elt))
	TREE_USED (decl) = 1;

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);

      if (mode == GET_MODE (x))
	decl_rtl = x;
      else if (MEM_P (x))
        /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
           instead create a new MEM rtx with the proper mode.  */
	decl_rtl = adjust_address_nv (x, mode, 0);
      else
	{
	  gcc_assert (REG_P (x));
	  decl_rtl = gen_lowpart_SUBREG (mode, x);
	}
      SET_DECL_RTL (decl_elt, decl_rtl);
    }
}
2082
2083/* Do the insertion of a case label into case_list.  The labels are
2084   fed to us in descending order from the sorted vector of case labels used
2085   in the tree part of the middle end.  So the list we construct is
2086   sorted in ascending order.  The bounds on the case range, LOW and HIGH,
2087   are converted to case's index type TYPE.  */
2088
static struct case_node *
add_case_node (struct case_node *head, tree type, tree low, tree high,
	       tree label)
{
  tree min_value, max_value;
  struct case_node *r;

  /* LOW must always be a constant; HIGH may be absent for a
     single-value case label.  */
  gcc_assert (TREE_CODE (low) == INTEGER_CST);
  gcc_assert (!high || TREE_CODE (high) == INTEGER_CST);

  /* Bounds of the index type; either may be non-constant, in which
     case the corresponding reachability checks below are skipped.  */
  min_value = TYPE_MIN_VALUE (type);
  max_value = TYPE_MAX_VALUE (type);

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.
     If the bounds are equal, turn this into the one-value case.  */
  if (!high || tree_int_cst_equal (low, high))
    {
      /* If the simple case value is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;
      low = fold_convert (type, low);
      high = low;
    }
  else
    {
      /* If the entire case range is unreachable, ignore it.  */
      if ((TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (high, min_value) < 0)
	  || (TREE_CODE (max_value) == INTEGER_CST
	      && tree_int_cst_compare (low, max_value) > 0))
	return head;

      /* If the lower bound is less than the index type's minimum
	 value, truncate the range bounds.  */
      if (TREE_CODE (min_value) == INTEGER_CST
            && tree_int_cst_compare (low, min_value) < 0)
	low = min_value;
      low = fold_convert (type, low);

      /* If the upper bound is greater than the index type's maximum
	 value, truncate the range bounds.  */
      if (TREE_CODE (max_value) == INTEGER_CST
	  && tree_int_cst_compare (high, max_value) > 0)
	high = max_value;
      high = fold_convert (type, high);
    }


  /* Add this label to the chain.  Make sure to drop overflow flags.  */
  r = ggc_alloc (sizeof (struct case_node));
  r->low = build_int_cst_wide (TREE_TYPE (low), TREE_INT_CST_LOW (low),
			       TREE_INT_CST_HIGH (low));
  r->high = build_int_cst_wide (TREE_TYPE (high), TREE_INT_CST_LOW (high),
				TREE_INT_CST_HIGH (high));
  r->code_label = label;
  r->parent = r->left = NULL;
  /* Labels arrive in descending order, so prepending keeps the
     constructed list sorted in ascending order.  */
  r->right = head;
  return r;
}
2153
2154/* Maximum number of case bit tests.  */
2155#define MAX_CASE_BIT_TESTS  3
2156
2157/* By default, enable case bit tests on targets with ashlsi3.  */
2158#ifndef CASE_USE_BIT_TESTS
2159#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
2160			     != CODE_FOR_nothing)
2161#endif
2162
2163
2164/* A case_bit_test represents a set of case nodes that may be
2165   selected from using a bit-wise comparison.  HI and LO hold
2166   the integer to be tested against, LABEL contains the label
2167   to jump to upon success and BITS counts the number of case
2168   nodes handled by this test, typically the number of bits
2169   set in HI:LO.  */
2170
struct case_bit_test
{
  HOST_WIDE_INT hi;	/* High half of the constant to test against.  */
  HOST_WIDE_INT lo;	/* Low half of the constant to test against.  */
  rtx label;		/* Label to jump to when the test succeeds.  */
  int bits;		/* Number of case nodes handled by this test.  */
};
2178
2179/* Determine whether "1 << x" is relatively cheap in word_mode.  */
2180
2181static
2182bool lshift_cheap_p (void)
2183{
2184  static bool init = false;
2185  static bool cheap = true;
2186
2187  if (!init)
2188    {
2189      rtx reg = gen_rtx_REG (word_mode, 10000);
2190      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
2191      cheap = cost < COSTS_N_INSNS (3);
2192      init = true;
2193    }
2194
2195  return cheap;
2196}
2197
2198/* Comparison function for qsort to order bit tests by decreasing
2199   number of case nodes, i.e. the node with the most cases gets
2200   tested first.  */
2201
2202static int
2203case_bit_test_cmp (const void *p1, const void *p2)
2204{
2205  const struct case_bit_test *d1 = p1;
2206  const struct case_bit_test *d2 = p2;
2207
2208  if (d2->bits != d1->bits)
2209    return d2->bits - d1->bits;
2210
2211  /* Stabilize the sort.  */
2212  return CODE_LABEL_NUMBER (d2->label) - CODE_LABEL_NUMBER (d1->label);
2213}
2214
2215/*  Expand a switch statement by a short sequence of bit-wise
2216    comparisons.  "switch(x)" is effectively converted into
2217    "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
2218    integer constants.
2219
2220    INDEX_EXPR is the value being switched on, which is of
2221    type INDEX_TYPE.  MINVAL is the lowest case value of in
2222    the case nodes, of INDEX_TYPE type, and RANGE is highest
2223    value minus MINVAL, also of type INDEX_TYPE.  NODES is
2224    the set of case nodes, and DEFAULT_LABEL is the label to
2225    branch to should none of the cases match.
2226
2227    There *MUST* be MAX_CASE_BIT_TESTS or less unique case
2228    node targets.  */
2229
2230static void
2231emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
2232		     tree range, case_node_ptr nodes, rtx default_label)
2233{
2234  struct case_bit_test test[MAX_CASE_BIT_TESTS];
2235  enum machine_mode mode;
2236  rtx expr, index, label;
2237  unsigned int i,j,lo,hi;
2238  struct case_node *n;
2239  unsigned int count;
2240
2241  count = 0;
2242  for (n = nodes; n; n = n->right)
2243    {
2244      label = label_rtx (n->code_label);
2245      for (i = 0; i < count; i++)
2246	if (label == test[i].label)
2247	  break;
2248
2249      if (i == count)
2250	{
2251	  gcc_assert (count < MAX_CASE_BIT_TESTS);
2252	  test[i].hi = 0;
2253	  test[i].lo = 0;
2254	  test[i].label = label;
2255	  test[i].bits = 1;
2256	  count++;
2257	}
2258      else
2259        test[i].bits++;
2260
2261      lo = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2262				      n->low, minval), 1);
2263      hi = tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
2264				      n->high, minval), 1);
2265      for (j = lo; j <= hi; j++)
2266        if (j >= HOST_BITS_PER_WIDE_INT)
2267	  test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
2268	else
2269	  test[i].lo |= (HOST_WIDE_INT) 1 << j;
2270    }
2271
2272  qsort (test, count, sizeof(*test), case_bit_test_cmp);
2273
2274  index_expr = fold_build2 (MINUS_EXPR, index_type,
2275			    fold_convert (index_type, index_expr),
2276			    fold_convert (index_type, minval));
2277  index = expand_normal (index_expr);
2278  do_pending_stack_adjust ();
2279
2280  mode = TYPE_MODE (index_type);
2281  expr = expand_normal (range);
2282  emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
2283			   default_label);
2284
2285  index = convert_to_mode (word_mode, index, 0);
2286  index = expand_binop (word_mode, ashl_optab, const1_rtx,
2287			index, NULL_RTX, 1, OPTAB_WIDEN);
2288
2289  for (i = 0; i < count; i++)
2290    {
2291      expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
2292      expr = expand_binop (word_mode, and_optab, index, expr,
2293			   NULL_RTX, 1, OPTAB_WIDEN);
2294      emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
2295			       word_mode, 1, test[i].label);
2296    }
2297
2298  emit_jump (default_label);
2299}
2300
2301#ifndef HAVE_casesi
2302#define HAVE_casesi 0
2303#endif
2304
2305#ifndef HAVE_tablejump
2306#define HAVE_tablejump 0
2307#endif
2308
2309/* Terminate a case (Pascal/Ada) or switch (C) statement
2310   in which ORIG_INDEX is the expression to be tested.
2311   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
2312   type as given in the source before any compiler conversions.
2313   Generate the code to test it and jump to the right place.  */
2314
void
expand_case (tree exp)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i, fail;
  rtx before_case, end, lab;

  tree vec = SWITCH_LABELS (exp);
  tree orig_type = TREE_TYPE (exp);
  tree index_expr = SWITCH_COND (exp);
  tree index_type = TREE_TYPE (index_expr);
  int unsignedp = TYPE_UNSIGNED (index_type);

  /* The insn after which the case dispatch should finally
     be emitted.  Zero for a dummy.  */
  rtx start;

  /* A list of case labels; it is first built as a list and it may then
     be rearranged into a nearly balanced binary tree.  */
  struct case_node *case_list = 0;

  /* Label to jump to if no case matches.  */
  tree default_label_decl;

  /* The switch body is lowered in gimplify.c, we should never have
     switches with a non-NULL SWITCH_BODY here.  */
  gcc_assert (!SWITCH_BODY (exp));
  gcc_assert (SWITCH_LABELS (exp));

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      tree elt;
      bitmap label_bitmap;

      /* cleanup_tree_cfg removes all SWITCH_EXPR with their index
	 expressions being INTEGER_CST.  */
      gcc_assert (TREE_CODE (index_expr) != INTEGER_CST);

      /* The default case is at the end of TREE_VEC.  */
      elt = TREE_VEC_ELT (vec, TREE_VEC_LENGTH (vec) - 1);
      gcc_assert (!CASE_HIGH (elt));
      gcc_assert (!CASE_LOW (elt));
      default_label_decl = CASE_LABEL (elt);

      /* Walk the remaining labels backwards so add_case_node builds an
	 ascending list (see its comment).  */
      for (i = TREE_VEC_LENGTH (vec) - 1; --i >= 0; )
	{
	  tree low, high;
	  elt = TREE_VEC_ELT (vec, i);

	  low = CASE_LOW (elt);
	  gcc_assert (low);
	  high = CASE_HIGH (elt);

	  /* Discard empty ranges.  */
	  if (high && INT_CST_LT (high, low))
	    continue;

	  case_list = add_case_node (case_list, index_type, low, high,
				     CASE_LABEL (elt));
	}


      before_case = start = get_last_insn ();
      default_label = label_rtx (default_label_decl);

      /* Get upper and lower bounds of case values.  */

      uniq = 0;
      count = 0;
      label_bitmap = BITMAP_ALLOC (NULL);
      for (n = case_list; n; n = n->right)
	{
	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* If we have not seen this label yet, then increase the
	     number of unique case node targets seen.  */
	  lab = label_rtx (n->code_label);
	  if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
	    {
	      bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
	      uniq++;
	    }
	}

      BITMAP_FREE (label_bitmap);

      /* cleanup_tree_cfg removes all SWITCH_EXPR with a single
	 destination, such as one with a default case only.  However,
	 it doesn't remove cases that are out of range for the switch
	 type, so we may still get a zero here.  */
      if (count == 0)
	{
	  emit_jump (default_label);
	  return;
	}

      /* Compute span of values.  */
      range = fold_build2 (MINUS_EXPR, index_type, maxval, minval);

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      if (CASE_USE_BIT_TESTS
	  && ! TREE_CONSTANT (index_expr)
	  && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	  && compare_tree_int (range, 0) > 0
	  && lshift_cheap_p ()
	  && ((uniq == 1 && count >= 3)
	      || (uniq == 2 && count >= 5)
	      || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = build_int_cst (index_type, 0);
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       case_list, default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_size ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || !flag_jump_tables
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_normal (index_expr);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  do_pending_stack_adjust ();

	  /* The index is compared repeatedly below, so copy a memory
	     operand into a register first.  */
	  if (MEM_P (index))
	    index = copy_to_reg (index);

	  /* We generate a binary decision tree to select the
	     appropriate target code.  This is done as follows:

	     The list of cases is rearranged into a binary tree,
	     nearly optimal assuming equal probability for each case.

	     The tree is transformed into RTL, eliminating
	     redundant test conditions at the same time.

	     If program flow could reach the end of the
	     decision tree an unconditional jump to the
	     default code is emitted.  */

	  use_cost_table
	    = (TREE_CODE (orig_type) != ENUMERAL_TYPE
	       && estimate_case_costs (case_list));
	  balance_case_nodes (&case_list, NULL);
	  emit_case_nodes (index, case_list, default_label, index_type);
	  emit_jump (default_label);
	}
      else
	{
	  /* Dispatch via a jump table: try a casesi instruction first,
	     then fall back to an ordinary tablejump.  */
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label))
	    {
	      bool ok;

	      /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
	      if (! optimize_size
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = build_int_cst (index_type, 0);
		  range = maxval;
		}

	      ok = try_tablejump (index_type, index_expr, minval, range,
				  table_label, default_label);
	      gcc_assert (ok);
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = alloca (ncases * sizeof (rtx));
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->low, minval), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold_build2 (MINUS_EXPR, index_type,
					     n->high, minval), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* Record no drop-through after the table.  */
	  emit_barrier ();
	}

      /* Move the dispatch code emitted above back to START, the point
	 where the switch itself was expanded.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      fail = squeeze_notes (&before_case, &end);
      gcc_assert (!fail);
      reorder_insns (before_case, end, start);
    }

  free_temp_slots ();
}
2607
/* Generate code to jump to LABEL if OP0 and OP1 are equal in mode MODE.
   UNSIGNEDP is nonzero if the comparison should be done as unsigned.  */

static void
do_jump_if_equal (enum machine_mode mode, rtx op0, rtx op1, rtx label,
		  int unsignedp)
{
  do_compare_rtx_and_jump (op0, op1, EQ, unsignedp, mode,
			   NULL_RTX, NULL_RTX, label);
}
2617
2618/* Not all case values are encountered equally.  This function
2619   uses a heuristic to weight case labels, in cases where that
2620   looks like a reasonable thing to do.
2621
2622   Right now, all we try to guess is text, and we establish the
2623   following weights:
2624
2625	chars above space:	16
2626	digits:			16
2627	default:		12
2628	space, punct:		8
2629	tab:			4
2630	newline:		2
2631	other "\" chars:	1
2632	remaining chars:	0
2633
2634   If we find any cases in the switch that are not either -1 or in the range
2635   of valid ASCII characters, or are control characters other than those
2636   commonly used with "\", don't treat this switch scanning text.
2637
2638   Return 1 if these nodes are suitable for cost estimation, otherwise
2639   return 0.  */
2640
2641static int
2642estimate_case_costs (case_node_ptr node)
2643{
2644  tree min_ascii = integer_minus_one_node;
2645  tree max_ascii = build_int_cst (TREE_TYPE (node->high), 127);
2646  case_node_ptr n;
2647  int i;
2648
2649  /* If we haven't already made the cost table, make it now.  Note that the
2650     lower bound of the table is -1, not zero.  */
2651
2652  if (! cost_table_initialized)
2653    {
2654      cost_table_initialized = 1;
2655
2656      for (i = 0; i < 128; i++)
2657	{
2658	  if (ISALNUM (i))
2659	    COST_TABLE (i) = 16;
2660	  else if (ISPUNCT (i))
2661	    COST_TABLE (i) = 8;
2662	  else if (ISCNTRL (i))
2663	    COST_TABLE (i) = -1;
2664	}
2665
2666      COST_TABLE (' ') = 8;
2667      COST_TABLE ('\t') = 4;
2668      COST_TABLE ('\0') = 4;
2669      COST_TABLE ('\n') = 2;
2670      COST_TABLE ('\f') = 1;
2671      COST_TABLE ('\v') = 1;
2672      COST_TABLE ('\b') = 1;
2673    }
2674
2675  /* See if all the case expressions look like text.  It is text if the
2676     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
2677     as signed arithmetic since we don't want to ever access cost_table with a
2678     value less than -1.  Also check that none of the constants in a range
2679     are strange control characters.  */
2680
2681  for (n = node; n; n = n->right)
2682    {
2683      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
2684	return 0;
2685
2686      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
2687	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2688	if (COST_TABLE (i) < 0)
2689	  return 0;
2690    }
2691
2692  /* All interesting values are within the range of interesting
2693     ASCII characters.  */
2694  return 1;
2695}
2696
2697/* Take an ordered list of case nodes
2698   and transform them into a near optimal binary tree,
2699   on the assumption that any target code selection value is as
2700   likely as any other.
2701
2702   The transformation is performed by splitting the ordered
2703   list into two equal sections plus a pivot.  The parts are
2704   then attached to the pivot as left and right branches.  Each
2705   branch is then transformed recursively.  */
2706
static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost.  Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* Detach the pivot node; it becomes the root of this subtree,
	     with the skipped prefix as its left child and the remainder
	     as its right child.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
2813
2814/* Search the parent sections of the case node tree
2815   to see if a test for the lower bound of NODE would be redundant.
2816   INDEX_TYPE is the type of the index expression.
2817
2818   The instructions to generate the case decision tree are
2819   output in the same order as nodes are processed so it is
2820   known that if a parent node checks the range of the current
2821   node minus one that the current node is bounded at its lower
2822   span.  Thus the test would be redundant.  */
2823
2824static int
2825node_has_low_bound (case_node_ptr node, tree index_type)
2826{
2827  tree low_minus_one;
2828  case_node_ptr pnode;
2829
2830  /* If the lower bound of this node is the lowest value in the index type,
2831     we need not test it.  */
2832
2833  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
2834    return 1;
2835
2836  /* If this node has a left branch, the value at the left must be less
2837     than that at this node, so it cannot be bounded at the bottom and
2838     we need not bother testing any further.  */
2839
2840  if (node->left)
2841    return 0;
2842
2843  low_minus_one = fold_build2 (MINUS_EXPR, TREE_TYPE (node->low),
2844			       node->low,
2845			       build_int_cst (TREE_TYPE (node->low), 1));
2846
2847  /* If the subtraction above overflowed, we can't verify anything.
2848     Otherwise, look for a parent that tests our value - 1.  */
2849
2850  if (! tree_int_cst_lt (low_minus_one, node->low))
2851    return 0;
2852
2853  for (pnode = node->parent; pnode; pnode = pnode->parent)
2854    if (tree_int_cst_equal (low_minus_one, pnode->high))
2855      return 1;
2856
2857  return 0;
2858}
2859
2860/* Search the parent sections of the case node tree
2861   to see if a test for the upper bound of NODE would be redundant.
2862   INDEX_TYPE is the type of the index expression.
2863
2864   The instructions to generate the case decision tree are
2865   output in the same order as nodes are processed so it is
2866   known that if a parent node checks the range of the current
2867   node plus one that the current node is bounded at its upper
2868   span.  Thus the test would be redundant.  */
2869
2870static int
2871node_has_high_bound (case_node_ptr node, tree index_type)
2872{
2873  tree high_plus_one;
2874  case_node_ptr pnode;
2875
2876  /* If there is no upper bound, obviously no test is needed.  */
2877
2878  if (TYPE_MAX_VALUE (index_type) == NULL)
2879    return 1;
2880
2881  /* If the upper bound of this node is the highest value in the type
2882     of the index expression, we need not test against it.  */
2883
2884  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
2885    return 1;
2886
2887  /* If this node has a right branch, the value at the right must be greater
2888     than that at this node, so it cannot be bounded at the top and
2889     we need not bother testing any further.  */
2890
2891  if (node->right)
2892    return 0;
2893
2894  high_plus_one = fold_build2 (PLUS_EXPR, TREE_TYPE (node->high),
2895			       node->high,
2896			       build_int_cst (TREE_TYPE (node->high), 1));
2897
2898  /* If the addition above overflowed, we can't verify anything.
2899     Otherwise, look for a parent that tests our value + 1.  */
2900
2901  if (! tree_int_cst_lt (node->high, high_plus_one))
2902    return 0;
2903
2904  for (pnode = node->parent; pnode; pnode = pnode->parent)
2905    if (tree_int_cst_equal (high_plus_one, pnode->low))
2906      return 1;
2907
2908  return 0;
2909}
2910
2911/* Search the parent sections of the
2912   case node tree to see if both tests for the upper and lower
2913   bounds of NODE would be redundant.  */
2914
2915static int
2916node_is_bounded (case_node_ptr node, tree index_type)
2917{
2918  return (node_has_low_bound (node, index_type)
2919	  && node_has_high_bound (node, index_type));
2920}
2921
2922/* Emit step-by-step code to select a case for the value of INDEX.
2923   The thus generated decision tree follows the form of the
2924   case-node binary tree NODE, whose nodes represent test conditions.
2925   INDEX_TYPE is the type of the index of the switch.
2926
2927   Care is taken to prune redundant tests from the decision tree
2928   by detecting any boundary conditions already checked by
2929   emitted rtx.  (See node_has_high_bound, node_has_low_bound
2930   and node_is_bounded, above.)
2931
2932   Where the test conditions can be shown to be redundant we emit
2933   an unconditional jump to the target code.  As a further
2934   optimization, the subordinates of a tree node are examined to
2935   check for bounded nodes.  In this case conditional and/or
2936   unconditional jumps as a result of the boundary check for the
2937   current node are arranged to target the subordinates associated
2938   code for out of bound conditions on the current node.
2939
2940   We can assume that when control reaches the code generated here,
2941   the index value has already been compared with the parents
2942   of this node, and determined to be on the same side of each parent
2943   as this node is.  Thus, if this node tests for the value 51,
2944   and a parent tested for 52, we don't need to consider
2945   the possibility of a value greater than 51.  If another parent
2946   tests for the value 50, then this node need not test anything.  */
2947
static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* Handle indices detected as constant during RTL expansion.  */
  if (mode == VOIDmode)
    mode = imode;

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (mode, index,
			convert_modes (mode, imode,
				       expand_normal (node->low),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->right->low),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (mode, index,
				convert_modes (mode, imode,
					       expand_normal (node->left->low),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if the right child
	     does not have any children and is single valued; it would
	     cost too much space to save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->right->low),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_normal (node->high),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (mode, index,
			      convert_modes
			      (mode, imode,
			       expand_normal (node->left->low),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_normal (node->high),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.
	     INDEX <= NODE->high here, so INDEX >= NODE->low means it
	     is within this node's own range.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->high),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_normal (node->low),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      /* Low bound is already proven; only values above the
		 range can escape to the default label.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->high),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      /* High bound is already proven; only values below the
		 range can escape to the default label.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_normal (node->low),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  After the subtraction, values
		 below LOW wrap around to large unsigned numbers, so a
		 single unsigned GT comparison catches both out-of-range
		 directions at once.  */
	      low_rtx = expand_expr (low, NULL_RTX, mode, EXPAND_NORMAL);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold_build2 (MINUS_EXPR, type,
						    high, low),
				       NULL_RTX, mode, EXPAND_NORMAL);

	      /* The comparison must be unsigned (last argument 1) for the
		 wrap-around trick above to work, regardless of UNSIGNEDP.  */
	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  /* In-range values fall through to the node's own code.  */
	  emit_jump (label_rtx (node->code_label));
	}
    }
}
3315