stmt.c revision 132718
/* Expands front end tree to back end RTL for GCC
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */

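/* As a minimal illustration (a sketch, not code that is built): for a
   C statement `if (cond) then_stmt;', a front end using these hooks
   emits roughly

	expand_start_cond (cond, 0);
	... expand the then-clause ...
	expand_end_cond ();

   calling `expand_start_else' in between when an else-clause is
   present.  */
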
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "langhooks.h"
#include "predict.h"
#include "optabs.h"
#include "target.h"

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

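/* For example (a sketch of the data, not generated code), the switch

	switch (i) { case 1: ...  case 2: ...  case 10 ... 20: ... }

   using the GNU C range extension gives three case_nodes -- {low=1,
   high=1}, {low=2, high=2} and {low=10, high=20} -- kept first in an
   AVL tree ordered by LOW, then flattened into a RIGHT-linked list
   before we decide how to emit the dispatch code.  */
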
struct case_node GTY(())
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
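/* So COST_TABLE (-1) refers to cost_table_[0] and COST_TABLE (127) to
   cost_table_[128]: indices -1 through 127 cover the 129-element table
   with no signed/unsigned surprises.  */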

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

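/* A sketch: while expanding `while (c) { if (d) break; }', the
   innermost entries on `nesting_stack' are the COND for the `if',
   then any BLOCK for the loop body, then the LOOP itself; meanwhile
   `cond_stack' holds only the COND entry and `loop_stack' only the
   LOOP entry, which is how a C `break' finds the loop while ignoring
   the intervening constructs.  */
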
struct nesting GTY(())
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  enum nesting_desc {
    COND_NESTING,
    LOOP_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For loops.  */
      struct nesting_loop
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} GTY ((tag ("LOOP_NESTING"))) loop;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
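
/* For instance, POPSTACK (loop_stack) pops nesting_stack entries --
   conds, blocks, and so on -- one by one until the entry that was at
   the top of loop_stack itself has been popped, keeping every
   per-construct stack in step.  */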

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
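
/* A sketch in GNU C of the situation just described:

	{
	  int vla[n];
	  ...
	  goto done;
	  ...
	}
      done: ;

   The contour declaring VLA must restore the stack pointer on exit;
   when `goto done' is expanded the label is not yet defined, so a
   fixup is recorded, and only once `done' appears can the proper
   stack-restoring code be inserted before the jump.  */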

struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting * x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;
  rtx x_last_expr_alt_rtl;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define last_expr_alt_rtl (cfun->stmt->x_last_expr_alt_rtl)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static rtx expand_nl_handler_label (rtx, rtx);
static void expand_nl_goto_receiver (void);
static void expand_nl_goto_receivers (struct nesting *);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static int tail_recursion_args (tree, tree);
static void expand_cleanups (tree, int, int);
static void check_seenlabel (void);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}

/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}

/* Emit a no-op instruction.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (tree label)
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}

/* As above, but also put it on the forced-reference list of the
   function that contains it.  */
rtx
force_label_rtx (tree label)
{
  rtx ref = label_rtx (label);
  tree function = decl_function_context (label);
  struct function *p;

  if (!function)
    abort ();

  if (function != current_function_decl
      && function != inline_function_decl)
    p = find_function_data (function);
  else
    p = cfun;

  p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
						p->expr->x_forced_labels);
  return ref;
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  x = convert_memory_address (Pmode, x);

  emit_queue ();

  if (! cfun->computed_goto_common_label)
    {
      cfun->computed_goto_common_reg = copy_to_mode_reg (Pmode, x);
      cfun->computed_goto_common_label = gen_label_rtx ();

      do_pending_stack_adjust ();
      emit_label (cfun->computed_goto_common_label);
      emit_indirect_jump (cfun->computed_goto_common_reg);

      current_function_has_computed_jump = 1;
    }
  else
    {
      emit_move_insn (cfun->computed_goto_common_reg, x);
      emit_jump (cfun->computed_goto_common_label);
    }
}
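
/* Funneling every computed goto through one common register and label,
   as above, keeps the flow graph smaller: N `goto *p' statements
   become one indirect jump plus N direct jumps to it, rather than N
   indirect jumps each of which must be assumed able to reach every
   label whose address is taken.  */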

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (tree label)
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (tree label)
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   gen_rtx_SCRATCH (VOIDmode))));
	  emit_insn (gen_rtx_CLOBBER (VOIDmode,
				      gen_rtx_MEM (BLKmode,
						   hard_frame_pointer_rtx)));

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	      break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
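
/* The nonlocal branch above handles GNU C nested functions, roughly:

	void f (void)
	{
	  __label__ out;
	  void g (void) { goto out; }
	  ...
	  g ();
	out: ;
	}

   The goto in `g' must restore f's frame and stack pointers via the
   static chain before jumping, which is what the code above emits.  */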

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a CODE_LABEL.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		   == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  (*lang_hooks.decls.insert_block) (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

	start_sequence ();
	start = emit_note (NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NOTE_INSN_DELETED);
	end = emit_note (NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}

/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN positive means report error if there is a jump into this
   contour from before the beginning of the contour.  This is also done if
   STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative.  */

static void
fixup_gotos (struct nesting *thisblock, rtx stack_level,
	     tree cleanup_list, rtx first_insn, int dont_jump_in)
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;
	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such insns can be added, by things like
	     put_var_into_stack, and can have INSN_UIDs that are out
	     of the range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
		  || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error ("%Jlabel '%D' used before containing binding contour",
		     f->target, f->target);
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  (*lang_hooks.decls.pushlevel) (0);
	  (*lang_hooks.decls.set_block) (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  (*lang_hooks.decls.poplevel) (1, 0, 0);

	  end_sequence ();
	  emit_insn_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      (*lang_hooks.decls.pushlevel) (0);
	      (*lang_hooks.decls.set_block) (f->context);
	      expand_cleanups (TREE_VALUE (lists), 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      (*lang_hooks.decls.poplevel) (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insn_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

void
expand_asm (tree string, int vol)
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  clear_last_expr ();
}

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */

bool
parse_output_constraint (const char **constraint_p, int operand_num,
			 int ninputs, int noutputs, bool *allows_mem,
			 bool *allows_reg, bool *is_inout)
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}
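
/* Example of the canonicalization (a sketch): for

	asm ("..." : "+r" (x));

   the "+r" above is rewritten to "=r", *IS_INOUT is set to true, and
   *ALLOWS_REG ends up true while *ALLOWS_MEM stays false.  */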

/* Similar, but for input constraints.  */

bool
parse_input_constraint (const char **constraint_p, int input_num,
			int ninputs, int noutputs, int ninout,
			const char * const * constraints,
			bool *allows_mem, bool *allows_reg)
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;
  bool saw_match = false;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  saw_match = true;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      /* ??? At the end of the loop, we will skip the first part of
		 the matched constraint.  This assumes not only that the
		 other constraint is an output constraint, but also that
		 the '=' or '+' come first.  */
	      break;
	    }
	  else
	    j = end - constraint;
	  /* Anticipate increment at end of loop.  */
	  j--;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
	    != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT_STR
	else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
	  *allows_reg = true;
	else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
	  *allows_mem = true;
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  if (saw_match && !*allows_reg)
    warning ("matching constraint does not allow a register");

  return true;
}
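
/* Example of a matching constraint (a sketch): given

	asm ("..." : "=r" (x) : "0" (y));

   the input's "0" is resolved to the output's "=r", so the input
   operand is treated as allowing a register, and reload will later
   force X and Y into the same one.  */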

/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in DECL.  Emit error and return TRUE for error,
   FALSE for ok.  */

static bool
decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
      && DECL_REGISTER (decl)
      && REG_P (DECL_RTL (decl))
      && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
    {
      rtx reg = DECL_RTL (decl);
      unsigned int regno;

      for (regno = REGNO (reg);
	   regno < (REGNO (reg)
		    + HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)));
	   regno++)
	if (TEST_HARD_REG_BIT (clobbered_regs, regno))
	  {
	    error ("asm-specifier for variable `%s' conflicts with asm clobber list",
		   IDENTIFIER_POINTER (DECL_NAME (decl)));

	    /* Reset registerness to stop multiple errors emitted for a
	       single variable.  */
	    DECL_REGISTER (decl) = 0;
	    return true;
	  }
    }
  return false;
}
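
/* The conflict rejected above looks like this (an i386-flavored
   sketch):

	register int x asm ("eax");
	asm ("..." : "=r" (x) : : "eax");

   X lives in %eax, but the asm also claims to clobber %eax, so no
   consistent register assignment exists and an error is emitted.  */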

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a tree list in TREE_PURPOSE which in turn contains a constraint
   name in TREE_VALUE (or NULL_TREE) and a constraint string
   in TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */

void
expand_asm_operands (tree string, tree outputs, tree inputs,
		     tree clobbers, int vol, location_t locus)
{
  rtvec argvec, constraintvec;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout;
  int nclobbers;
  HARD_REG_SET clobbered_regs;
  int clobber_conflict_found = 0;
  tree tail;
  tree t;
  int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = alloca (noutputs * sizeof (rtx));
  int *inout_opnum = alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = alloca (noutputs * sizeof (enum machine_mode));
  const char **constraints
    = alloca ((noutputs + ninputs) * sizeof (const char *));
  int old_generating_concat_p = generating_concat_p;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (! check_operand_nalternatives (outputs, inputs))
    return;

  string = resolve_asm_operand_names (string, outputs, inputs);

  /* Collect constraints.  */
  i = 0;
  for (t = outputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
  for (t = inputs; t ; t = TREE_CHAIN (t), i++)
    constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));

#ifdef MD_ASM_CLOBBERS
  /* Sometimes we wish to automatically clobber registers across an asm.
     Case in point is when the i386 backend moved from cc0 to a hard reg --
     maintaining source-level compatibility means automatically clobbering
     the flags register.  */
  MD_ASM_CLOBBERS (clobbers);
#endif

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  CLEAR_HARD_REG_SET (clobbered_regs);
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));

      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);

      /* Mark clobbered registers.  */
      if (i >= 0)
        {
	  /* Clobbering the PIC register is an error.  */
	  if (i == (int) PIC_OFFSET_TABLE_REGNUM)
	    {
	      error ("PIC register `%s' clobbered in `asm'", regname);
	      return;
	    }

	  SET_HARD_REG_BIT (clobbered_regs, i);
	}
    }

  clear_last_expr ();

  /* First pass over inputs and outputs checks validity and sets
     mark_addressable if needed.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      const char *constraint;
      bool is_inout;
      bool allows_reg;
      bool allows_mem;

      /* If there's an erroneous arg, emit no insn.  */
      if (type == error_mark_node)
	return;

      /* Try to parse the output constraint.  If that fails, there's
	 no point in going further.  */
      constraint = constraints[i];
      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
				    &allows_mem, &allows_reg, &is_inout))
	return;

      if (! allows_reg
	  && (allows_mem
	      || is_inout
	      || (DECL_P (val)
		  && GET_CODE (DECL_RTL (val)) == REG
		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
	(*lang_hooks.mark_addressable) (val);

      if (is_inout)
	ninout++;
    }

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
    {
      bool allows_reg, allows_mem;
      const char *constraint;

      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
	 would get VOIDmode and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      constraint = constraints[i + noutputs];
      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
				    constraints, &allows_mem, &allows_reg))
	return;

      if (! allows_reg && allows_mem)
	(*lang_hooks.mark_addressable) (TREE_VALUE (tail));
    }

  /* Second pass evaluates arguments.  */

  ninout = 0;
  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      bool is_inout;
      bool allows_reg;
      bool allows_mem;
      rtx op;

      if (!parse_output_constraint (&constraints[i], i, ninputs,
				    noutputs, &allows_mem, &allows_reg,
				    &is_inout))
	abort ();

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      generating_concat_p = 0;

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (DECL_P (val)
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
	  if (GET_CODE (op) == MEM)
	    op = validize_mem (op);

	  if (! allows_reg && GET_CODE (op) != MEM)
	    error ("output number %d not directly addressable", i);
	  if ((! allows_mem && GET_CODE (op) == MEM)
	      || GET_CODE (op) == CONCAT)
	    {
	      real_output_rtx[i] = protect_from_queue (op, 1);
	      op = gen_reg_rtx (GET_MODE (op));
	      if (is_inout)
		emit_move_insn (op, real_output_rtx[i]);
	    }
	}
      else
	{
	  op = assign_temp (type, 0, 0, 1);
	  op = validize_mem (op);
	  TREE_VALUE (tail) = make_tree (type, op);
	}
      output_rtx[i] = op;

      generating_concat_p = old_generating_concat_p;

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (type);
	  inout_opnum[ninout++] = i;
	}

      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1657	clobber_conflict_found = 1;
1658    }
1659
1660  /* Make vectors for the expression-rtx, constraint strings,
1661     and named operands.  */
1662
1663  argvec = rtvec_alloc (ninputs);
1664  constraintvec = rtvec_alloc (ninputs);
1665
1666  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1667				: GET_MODE (output_rtx[0])),
1668			       TREE_STRING_POINTER (string),
1669			       empty_string, 0, argvec, constraintvec,
1670			       locus.file, locus.line);
1671
1672  MEM_VOLATILE_P (body) = vol;
1673
1674  /* Eval the inputs and put them into ARGVEC.
1675     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
1676
1677  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1678    {
1679      bool allows_reg, allows_mem;
1680      const char *constraint;
1681      tree val, type;
1682      rtx op;
1683
1684      constraint = constraints[i + noutputs];
1685      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1686				    constraints, &allows_mem, &allows_reg))
1687	abort ();
1688
1689      generating_concat_p = 0;
1690
1691      val = TREE_VALUE (tail);
1692      type = TREE_TYPE (val);
1693      op = expand_expr (val, NULL_RTX, VOIDmode,
1694			(allows_mem && !allows_reg
1695			 ? EXPAND_MEMORY : EXPAND_NORMAL));
1696
1697      /* Never pass a CONCAT to an ASM.  */
1698      if (GET_CODE (op) == CONCAT)
1699	op = force_reg (GET_MODE (op), op);
1700      else if (GET_CODE (op) == MEM)
1701	op = validize_mem (op);
1702
1703      if (asm_operand_ok (op, constraint) <= 0)
1704	{
1705	  if (allows_reg)
1706	    op = force_reg (TYPE_MODE (type), op);
1707	  else if (!allows_mem)
1708	    warning ("asm operand %d probably doesn't match constraints",
1709		     i + noutputs);
1710	  else if (GET_CODE (op) == MEM)
1711	    {
1712	      /* We won't recognize either volatile memory or memory
1713		 with a queued address as a valid memory_operand
1714		 at this point.  Ignore it: clearly this *is* a memory.  */
1715	    }
1716	  else
1717	    {
1718	      warning ("use of memory input without lvalue in "
1719		       "asm operand %d is deprecated", i + noutputs);
1720
1721	      if (CONSTANT_P (op))
1722		{
1723		  rtx mem = force_const_mem (TYPE_MODE (type), op);
1724		  if (mem)
1725		    op = validize_mem (mem);
1726		  else
1727		    op = force_reg (TYPE_MODE (type), op);
1728		}
1729	      if (GET_CODE (op) == REG
1730		  || GET_CODE (op) == SUBREG
1731		  || GET_CODE (op) == ADDRESSOF
1732		  || GET_CODE (op) == CONCAT)
1733		{
1734		  tree qual_type = build_qualified_type (type,
1735							 (TYPE_QUALS (type)
1736							  | TYPE_QUAL_CONST));
1737		  rtx memloc = assign_temp (qual_type, 1, 1, 1);
1738		  memloc = validize_mem (memloc);
1739		  emit_move_insn (memloc, op);
1740		  op = memloc;
1741		}
1742	    }
1743	}
1744
1745      generating_concat_p = old_generating_concat_p;
1746      ASM_OPERANDS_INPUT (body, i) = op;
1747
1748      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1749	= gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1750
1751      if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1752	clobber_conflict_found = 1;
1753    }
1754
1755  /* Protect all the operands from the queue now that they have all been
1756     evaluated.  */
1757
1758  generating_concat_p = 0;
1759
1760  for (i = 0; i < ninputs - ninout; i++)
1761    ASM_OPERANDS_INPUT (body, i)
1762      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1763
1764  for (i = 0; i < noutputs; i++)
1765    output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1766
1767  /* For in-out operands, copy output rtx to input rtx.  */
1768  for (i = 0; i < ninout; i++)
1769    {
1770      int j = inout_opnum[i];
1771      char buffer[16];
1772
1773      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1774	= output_rtx[j];
1775
1776      sprintf (buffer, "%d", j);
1777      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1778	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
1779    }
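  /* Illustrative example: for `asm ("incl %0" : "+r" (x));' the "+"
     makes operand 0 an in-out operand, so the loop above appends a
     hidden input whose constraint is the digit string "0".  */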
1780
1781  generating_concat_p = old_generating_concat_p;
1782
1783  /* Now, for each output, construct an rtx
1784     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1785			       ARGVEC CONSTRAINTS OPNAMES))
1786     If there is more than one, put them inside a PARALLEL.  */
1787
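  /* Illustrative example: for two outputs plus a "memory" clobber,
     the result built below has the shape

	(parallel [(set OUT0 (asm_operands ...))
		   (set OUT1 (asm_operands ...))
		   (clobber (mem:BLK (scratch)))])  */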
1788  if (noutputs == 1 && nclobbers == 0)
1789    {
1790      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1791      emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1792    }
1793
1794  else if (noutputs == 0 && nclobbers == 0)
1795    {
1796      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
1797      emit_insn (body);
1798    }
1799
1800  else
1801    {
1802      rtx obody = body;
1803      int num = noutputs;
1804
1805      if (num == 0)
1806	num = 1;
1807
1808      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1809
1810      /* For each output operand, store a SET.  */
1811      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1812	{
1813	  XVECEXP (body, 0, i)
1814	    = gen_rtx_SET (VOIDmode,
1815			   output_rtx[i],
1816			   gen_rtx_ASM_OPERANDS
1817			   (GET_MODE (output_rtx[i]),
1818			    TREE_STRING_POINTER (string),
1819			    constraints[i], i, argvec, constraintvec,
1820			    locus.file, locus.line));
1821
1822	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1823	}
1824
1825      /* If there are no outputs (but there are some clobbers)
1826	 store the bare ASM_OPERANDS into the PARALLEL.  */
1827
1828      if (i == 0)
1829	XVECEXP (body, 0, i++) = obody;
1830
1831      /* Store (clobber REG) for each clobbered register specified.  */
1832
1833      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1834	{
1835	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1836	  int j = decode_reg_name (regname);
1837	  rtx clobbered_reg;
1838
1839	  if (j < 0)
1840	    {
1841	      if (j == -3)	/* `cc', which is not a register */
1842		continue;
1843
1844	      if (j == -4)	/* `memory', don't cache memory across asm */
1845		{
1846		  XVECEXP (body, 0, i++)
1847		    = gen_rtx_CLOBBER (VOIDmode,
1848				       gen_rtx_MEM
1849				       (BLKmode,
1850					gen_rtx_SCRATCH (VOIDmode)));
1851		  continue;
1852		}
1853
1854	      /* Ignore unknown register, error already signaled.  */
1855	      continue;
1856	    }
1857
1858	  /* Use QImode since that's guaranteed to clobber just one reg.  */
1859	  clobbered_reg = gen_rtx_REG (QImode, j);
1860
1861	  /* Sanity-check for any unhandled overlap between the clobbers
1862	     and, respectively, the inputs and outputs.  Such overlap
1863	     should have been detected and reported above.  */
1864	  if (!clobber_conflict_found)
1865	    {
1866	      int opno;
1867
1868	      /* We test the old body (obody) contents to avoid tripping
1869		 over the under-construction body.  */
1870	      for (opno = 0; opno < noutputs; opno++)
1871		if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1872		  internal_error ("asm clobber conflict with output operand");
1873
1874	      for (opno = 0; opno < ninputs - ninout; opno++)
1875		if (reg_overlap_mentioned_p (clobbered_reg,
1876					     ASM_OPERANDS_INPUT (obody, opno)))
1877		  internal_error ("asm clobber conflict with input operand");
1878	    }
1879
1880	  XVECEXP (body, 0, i++)
1881	    = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
1882	}
1883
1884      emit_insn (body);
1885    }
1886
1887  /* For any outputs that needed reloading into registers, spill them
1888     back to where they belong.  */
1889  for (i = 0; i < noutputs; ++i)
1890    if (real_output_rtx[i])
1891      emit_move_insn (real_output_rtx[i], output_rtx[i]);
1892
1893  free_temp_slots ();
1894}
1895
1896/* A subroutine of expand_asm_operands.  Check that all operands have
1897   the same number of alternatives.  Return true if so.  */
1898
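/* Illustrative example: the constraint strings "=r,m" and "g,0" each
   contain one comma and so describe two alternatives apiece; pairing
   either of them with a plain one-alternative "r" is rejected here.  */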
1899static bool
1900check_operand_nalternatives (tree outputs, tree inputs)
1901{
1902  if (outputs || inputs)
1903    {
1904      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1905      int nalternatives
1906	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1907      tree next = inputs;
1908
1909      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1910	{
1911	  error ("too many alternatives in `asm'");
1912	  return false;
1913	}
1914
1915      tmp = outputs;
1916      while (tmp)
1917	{
1918	  const char *constraint
1919	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1920
1921	  if (n_occurrences (',', constraint) != nalternatives)
1922	    {
1923	      error ("operand constraints for `asm' differ in number of alternatives");
1924	      return false;
1925	    }
1926
1927	  if (TREE_CHAIN (tmp))
1928	    tmp = TREE_CHAIN (tmp);
1929	  else
1930	    tmp = next, next = 0;
1931	}
1932    }
1933
1934  return true;
1935}
1936
1937/* A subroutine of expand_asm_operands.  Check that all operand names
1938   are unique.  Return true if so.  The names are STRING_CSTs rather
1939   than canonicalized identifiers, so we compare them with
1940   simple_cst_equal rather than by pointer.  */
1941
1942static bool
1943check_unique_operand_names (tree outputs, tree inputs)
1944{
1945  tree i, j;
1946
1947  for (i = outputs; i ; i = TREE_CHAIN (i))
1948    {
1949      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1950      if (! i_name)
1951	continue;
1952
1953      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1954	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1955	  goto failure;
1956    }
1957
1958  for (i = inputs; i ; i = TREE_CHAIN (i))
1959    {
1960      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1961      if (! i_name)
1962	continue;
1963
1964      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
1965	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1966	  goto failure;
1967      for (j = outputs; j ; j = TREE_CHAIN (j))
1968	if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
1969	  goto failure;
1970    }
1971
1972  return true;
1973
1974 failure:
1975  error ("duplicate asm operand name '%s'",
1976	 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
1977  return false;
1978}
1979
1980/* A subroutine of expand_asm_operands.  Resolve the names of the operands
1981   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
1982   STRING and in the constraints to those numbers.  */
1983
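/* Illustrative example: given

	asm ("mov %[in], %[out]" : [out] "=r" (x) : [in] "r" (y));

   operand [out] is number 0 and [in] is number 1, so the template
   becomes "mov %1, %0", and an input constraint written "[out]"
   would become "0".  */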
1984tree
1985resolve_asm_operand_names (tree string, tree outputs, tree inputs)
1986{
1987  char *buffer;
1988  char *p;
1989  const char *c;
1990  tree t;
1991
1992  check_unique_operand_names (outputs, inputs);
1993
1994  /* Substitute [<name>] in input constraint strings.  There should be no
1995     named operands in output constraints.  */
1996  for (t = inputs; t ; t = TREE_CHAIN (t))
1997    {
1998      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1999      if (strchr (c, '[') != NULL)
2000	{
2001	  p = buffer = xstrdup (c);
2002	  while ((p = strchr (p, '[')) != NULL)
2003	    p = resolve_operand_name_1 (p, outputs, inputs);
2004	  TREE_VALUE (TREE_PURPOSE (t))
2005	    = build_string (strlen (buffer), buffer);
2006	  free (buffer);
2007	}
2008    }
2009
2010  /* Now check for any needed substitutions in the template.  */
2011  c = TREE_STRING_POINTER (string);
2012  while ((c = strchr (c, '%')) != NULL)
2013    {
2014      if (c[1] == '[')
2015	break;
2016      else if (ISALPHA (c[1]) && c[2] == '[')
2017	break;
2018      else
2019	{
2020	  c += 1;
2021	  continue;
2022	}
2023    }
2024
2025  if (c)
2026    {
2027      /* OK, we need to make a copy so we can perform the substitutions.
2028	 Assume that we will not need extra space--we get to remove '['
2029	 and ']', which means we cannot have a problem until we have more
2030	 than 999 operands.  */
2031      buffer = xstrdup (TREE_STRING_POINTER (string));
2032      p = buffer + (c - TREE_STRING_POINTER (string));
2033
2034      while ((p = strchr (p, '%')) != NULL)
2035	{
2036	  if (p[1] == '[')
2037	    p += 1;
2038	  else if (ISALPHA (p[1]) && p[2] == '[')
2039	    p += 2;
2040	  else
2041	    {
2042	      p += 1;
2043	      continue;
2044	    }
2045
2046	  p = resolve_operand_name_1 (p, outputs, inputs);
2047	}
2048
2049      string = build_string (strlen (buffer), buffer);
2050      free (buffer);
2051    }
2052
2053  return string;
2054}
2055
2056/* A subroutine of resolve_operand_names.  P points to the '[' for a
2057   potential named operand of the form [<name>].  In place, replace
2058   the name and brackets with a number.  Return a pointer to the
2059   balance of the string after substitution.  */
2060
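/* For example (illustrative), if an operand named [dst] resolved to
   number 0, the text "%[dst]" is rewritten in place as "%0" and the
   returned pointer addresses the text just after the "0".  */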
2061static char *
2062resolve_operand_name_1 (char *p, tree outputs, tree inputs)
2063{
2064  char *q;
2065  int op;
2066  tree t;
2067  size_t len;
2068
2069  /* Collect the operand name.  */
2070  q = strchr (p, ']');
2071  if (!q)
2072    {
2073      error ("missing close bracket for named operand");
2074      return strchr (p, '\0');
2075    }
2076  len = q - p - 1;
2077
2078  /* Resolve the name to a number.  */
2079  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2080    {
2081      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2082      if (name)
2083	{
2084	  const char *c = TREE_STRING_POINTER (name);
2085	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2086	    goto found;
2087	}
2088    }
2089  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2090    {
2091      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
2092      if (name)
2093	{
2094	  const char *c = TREE_STRING_POINTER (name);
2095	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2096	    goto found;
2097	}
2098    }
2099
2100  *q = '\0';
2101  error ("undefined named operand '%s'", p + 1);
2102  op = 0;
2103 found:
2104
2105  /* Replace the name with the number.  Unfortunately, not all libraries
2106     get the return value of sprintf correct, so search for the end of the
2107     generated string by hand.  */
2108  sprintf (p, "%d", op);
2109  p = strchr (p, '\0');
2110
2111  /* Verify the no extra buffer space assumption.  */
2112  if (p > q)
2113    abort ();
2114
2115  /* Shift the rest of the buffer down to fill the gap.  */
2116  memmove (p, q + 1, strlen (q + 1) + 1);
2117
2118  return p;
2119}
2120
2121/* Generate RTL to evaluate the expression EXP
2122   and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2123   Provided just for backward-compatibility.  expand_expr_stmt_value()
2124   should be used for new code.  */
2125
2126void
2127expand_expr_stmt (tree exp)
2128{
2129  expand_expr_stmt_value (exp, -1, 1);
2130}
2131
2132/* Generate RTL to evaluate the expression EXP.  WANT_VALUE tells
2133   whether to (1) save the value of the expression, (0) discard it or
2134   (-1) use expr_stmts_for_value to tell.  The use of -1 is
2135   deprecated, and retained only for backward compatibility.  */
2136
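/* Illustrative example: in `y = ({ f (); x; });' each statement of
   the group passes through here while expr_stmts_for_value is
   nonzero, and the value of the final `x' is what
   expand_end_stmt_expr later collects from last_expr_value.  */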
2137void
2138expand_expr_stmt_value (tree exp, int want_value, int maybe_last)
2139{
2140  rtx value;
2141  tree type;
2142  rtx alt_rtl = NULL;
2143
2144  if (want_value == -1)
2145    want_value = expr_stmts_for_value != 0;
2146
2147  /* If -Wunused-value, warn about statements with no side effects,
2148     except for an explicit cast to void (e.g. for assert ()), and
2149     except for the last statement in ({...}) where it may be useful.  */
2150  if (! want_value
2151      && (expr_stmts_for_value == 0 || ! maybe_last)
2152      && exp != error_mark_node
2153      && warn_unused_value)
2154    {
2155      if (TREE_SIDE_EFFECTS (exp))
2156	warn_if_unused_value (exp);
2157      else if (!VOID_TYPE_P (TREE_TYPE (exp)))
2158	warning ("%Hstatement with no effect", &emit_locus);
2159    }
2160
2161  /* If EXP is of function type and we are expanding statements for
2162     value, convert it to pointer-to-function.  */
2163  if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2164    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2165
2166  /* The call to `expand_expr' could cause last_expr_type and
2167     last_expr_value to get reset.  Therefore, we set last_expr_value
2168     and last_expr_type *after* calling expand_expr.  */
2169  value = expand_expr_real (exp, want_value ? NULL_RTX : const0_rtx,
2170			    VOIDmode, 0, &alt_rtl);
2171  type = TREE_TYPE (exp);
2172
2173  /* If all we do is reference a volatile value in memory,
2174     copy it to a register to be sure it is actually touched.  */
2175  if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2176    {
2177      if (TYPE_MODE (type) == VOIDmode)
2178	;
2179      else if (TYPE_MODE (type) != BLKmode)
2180	value = copy_to_reg (value);
2181      else
2182	{
2183	  rtx lab = gen_label_rtx ();
2184
2185	  /* Compare the value with itself to reference it.  */
2186	  emit_cmp_and_jump_insns (value, value, EQ,
2187				   expand_expr (TYPE_SIZE (type),
2188						NULL_RTX, VOIDmode, 0),
2189				   BLKmode, 0, lab);
2190	  emit_label (lab);
2191	}
2192    }
2193
2194  /* If this expression is part of a ({...}) and is in memory, we may have
2195     to preserve temporaries.  */
2196  preserve_temp_slots (value);
2197
2198  /* Free any temporaries used to evaluate this expression.  Any temporary
2199     used as a result of this expression will already have been preserved
2200     above.  */
2201  free_temp_slots ();
2202
2203  if (want_value)
2204    {
2205      last_expr_value = value;
2206      last_expr_alt_rtl = alt_rtl;
2207      last_expr_type = type;
2208    }
2209
2210  emit_queue ();
2211}
2212
2213/* Warn if EXP contains any computations whose results are not used.
2214   Return 1 if a warning is printed; 0 otherwise.  */
2215
2216int
2217warn_if_unused_value (tree exp)
2218{
2219  if (TREE_USED (exp))
2220    return 0;
2221
2222  /* Don't warn about void constructs.  This includes casting to void,
2223     void function calls, and statement expressions with a final cast
2224     to void.  */
2225  if (VOID_TYPE_P (TREE_TYPE (exp)))
2226    return 0;
2227
2228  switch (TREE_CODE (exp))
2229    {
2230    case PREINCREMENT_EXPR:
2231    case POSTINCREMENT_EXPR:
2232    case PREDECREMENT_EXPR:
2233    case POSTDECREMENT_EXPR:
2234    case MODIFY_EXPR:
2235    case INIT_EXPR:
2236    case TARGET_EXPR:
2237    case CALL_EXPR:
2238    case RTL_EXPR:
2239    case TRY_CATCH_EXPR:
2240    case WITH_CLEANUP_EXPR:
2241    case EXIT_EXPR:
2242      return 0;
2243
2244    case BIND_EXPR:
2245      /* For a binding, warn if no side effect within it.  */
2246      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2247
2248    case SAVE_EXPR:
2249      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2250
2251    case TRUTH_ORIF_EXPR:
2252    case TRUTH_ANDIF_EXPR:
2253      /* In && or ||, warn if 2nd operand has no side effect.  */
2254      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2255
2256    case COMPOUND_EXPR:
2257      if (TREE_NO_UNUSED_WARNING (exp))
2258	return 0;
2259      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2260	return 1;
2261      /* Let people do `(foo (), 0)' without a warning.  */
2262      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2263	return 0;
2264      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2265
2266    case NOP_EXPR:
2267    case CONVERT_EXPR:
2268    case NON_LVALUE_EXPR:
2269      /* Don't warn about conversions not explicit in the user's program.  */
2270      if (TREE_NO_UNUSED_WARNING (exp))
2271	return 0;
2272      /* Assignment to a cast usually results in a cast of a modify.
2273	 Don't complain about that.  There can be an arbitrary number of
2274	 casts before the modify, so we must loop until we find the first
2275	 non-cast expression and then test to see if that is a modify.  */
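      /* For instance (illustrative), the statement `(long) (x = 5);'
	 reaches here as a NOP_EXPR around a MODIFY_EXPR and draws no
	 warning.  */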
2276      {
2277	tree tem = TREE_OPERAND (exp, 0);
2278
2279	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2280	  tem = TREE_OPERAND (tem, 0);
2281
2282	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2283	    || TREE_CODE (tem) == CALL_EXPR)
2284	  return 0;
2285      }
2286      goto maybe_warn;
2287
2288    case INDIRECT_REF:
2289      /* Don't warn about automatic dereferencing of references, since
2290	 the user cannot control it.  */
2291      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2292	return warn_if_unused_value (TREE_OPERAND (exp, 0));
2293      /* Fall through.  */
2294
2295    default:
2296      /* Referencing a volatile value is a side effect, so don't warn.  */
2297      if ((DECL_P (exp)
2298	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2299	  && TREE_THIS_VOLATILE (exp))
2300	return 0;
2301
2302      /* If this is an expression which has no operands, there is no value
2303	 to be unused.  There are no such language-independent codes,
2304	 but front ends may define such.  */
2305      if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2306	  && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2307	return 0;
2308
2309    maybe_warn:
2310      /* If this is an expression with side effects, don't warn.  */
2311      if (TREE_SIDE_EFFECTS (exp))
2312	return 0;
2313
2314      warning ("%Hvalue computed is not used", &emit_locus);
2315      return 1;
2316    }
2317}
2318
2319/* Clear out the memory of the last expression evaluated.  */
2320
2321void
2322clear_last_expr (void)
2323{
2324  last_expr_type = NULL_TREE;
2325  last_expr_value = NULL_RTX;
2326  last_expr_alt_rtl = NULL_RTX;
2327}
2328
2329/* Begin a statement-expression, i.e., a series of statements which
2330   may return a value.  Return the RTL_EXPR for this statement expr.
2331   The caller must save that value and pass it to
2332   expand_end_stmt_expr.  If HAS_SCOPE is nonzero, temporaries created
2333   in the statement-expression are deallocated at the end of the
2334   expression.  */
2335
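/* Illustrative example: for `({ int t = f (); t + 1; })' the caller
   invokes this function before expanding the enclosed statements and
   expand_end_stmt_expr afterwards; the RTL_EXPR returned here ends up
   carrying both the generated insns and the value of `t + 1'.  */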
2336tree
2337expand_start_stmt_expr (int has_scope)
2338{
2339  tree t;
2340
2341  /* Make the RTL_EXPR node temporary, not momentary,
2342     so that rtl_expr_chain doesn't become garbage.  */
2343  t = make_node (RTL_EXPR);
2344  do_pending_stack_adjust ();
2345  if (has_scope)
2346    start_sequence_for_rtl_expr (t);
2347  else
2348    start_sequence ();
2349  NO_DEFER_POP;
2350  expr_stmts_for_value++;
2351  return t;
2352}
2353
2354/* Restore the previous state at the end of a statement that returns a value.
2355   Returns a tree node representing the statement's value and the
2356   insns to compute the value.
2357
2358   The nodes of that expression have been freed by now, so we cannot use them.
2359   But we don't want to do that anyway; the expression has already been
2360   evaluated and now we just want to use the value.  So generate an RTL_EXPR
2361   with the proper type and RTL value.
2362
2363   If the last substatement was not an expression,
2364   return something with type `void'.  */
2365
2366tree
2367expand_end_stmt_expr (tree t)
2368{
2369  OK_DEFER_POP;
2370
2371  if (! last_expr_value || ! last_expr_type)
2372    {
2373      last_expr_value = const0_rtx;
2374      last_expr_alt_rtl = NULL_RTX;
2375      last_expr_type = void_type_node;
2376    }
2377  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2378    /* Remove any possible QUEUED.  */
2379    last_expr_value = protect_from_queue (last_expr_value, 0);
2380
2381  emit_queue ();
2382
2383  TREE_TYPE (t) = last_expr_type;
2384  RTL_EXPR_RTL (t) = last_expr_value;
2385  RTL_EXPR_ALT_RTL (t) = last_expr_alt_rtl;
2386  RTL_EXPR_SEQUENCE (t) = get_insns ();
2387
2388  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2389
2390  end_sequence ();
2391
2392  /* Don't consider deleting this expr or containing exprs at tree level.  */
2393  TREE_SIDE_EFFECTS (t) = 1;
2394  /* Propagate volatility of the actual RTL expr.  */
2395  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2396
2397  clear_last_expr ();
2398  expr_stmts_for_value--;
2399
2400  return t;
2401}
2402
2403/* Generate RTL for the start of an if-then.  COND is the expression
2404   whose truth should be tested.
2405
2406   If EXITFLAG is nonzero, this conditional is visible to
2407   `exit_something'.  */
2408
2409void
2410expand_start_cond (tree cond, int exitflag)
2411{
2412  struct nesting *thiscond = ALLOC_NESTING ();
2413
2414  /* Make an entry on cond_stack for the cond we are entering.  */
2415
2416  thiscond->desc = COND_NESTING;
2417  thiscond->next = cond_stack;
2418  thiscond->all = nesting_stack;
2419  thiscond->depth = ++nesting_depth;
2420  thiscond->data.cond.next_label = gen_label_rtx ();
2421  /* Before we encounter an `else', we don't need a separate exit label
2422     unless there are supposed to be exit statements
2423     to exit this conditional.  */
2424  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2425  thiscond->data.cond.endif_label = thiscond->exit_label;
2426  cond_stack = thiscond;
2427  nesting_stack = thiscond;
2428
2429  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2430}
2431
2432/* Generate RTL between the then-clause and the elseif-clause
2433   of an if-then-elseif-....  */
2434
2435void
2436expand_start_elseif (tree cond)
2437{
2438  if (cond_stack->data.cond.endif_label == 0)
2439    cond_stack->data.cond.endif_label = gen_label_rtx ();
2440  emit_jump (cond_stack->data.cond.endif_label);
2441  emit_label (cond_stack->data.cond.next_label);
2442  cond_stack->data.cond.next_label = gen_label_rtx ();
2443  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2444}
2445
2446/* Generate RTL between the then-clause and the else-clause
2447   of an if-then-else.  */
2448
2449void
2450expand_start_else (void)
2451{
2452  if (cond_stack->data.cond.endif_label == 0)
2453    cond_stack->data.cond.endif_label = gen_label_rtx ();
2454
2455  emit_jump (cond_stack->data.cond.endif_label);
2456  emit_label (cond_stack->data.cond.next_label);
2457  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
2458}
2459
2460/* After calling expand_start_else, turn this "else" into an "else if"
2461   by providing another condition.  */
2462
2463void
2464expand_elseif (tree cond)
2465{
2466  cond_stack->data.cond.next_label = gen_label_rtx ();
2467  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2468}
2469
2470/* Generate RTL for the end of an if-then.
2471   Pop the record for it off of cond_stack.  */
2472
2473void
2474expand_end_cond (void)
2475{
2476  struct nesting *thiscond = cond_stack;
2477
2478  do_pending_stack_adjust ();
2479  if (thiscond->data.cond.next_label)
2480    emit_label (thiscond->data.cond.next_label);
2481  if (thiscond->data.cond.endif_label)
2482    emit_label (thiscond->data.cond.endif_label);
2483
2484  POPSTACK (cond_stack);
2485  clear_last_expr ();
2486}
2487
2488/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
2489   loop should be exited by `exit_something'.  This is a loop for which
2490   `expand_continue' will jump to the top of the loop.
2491
2492   Make an entry on loop_stack to record the labels associated with
2493   this loop.  */
2494
2495struct nesting *
2496expand_start_loop (int exit_flag)
2497{
2498  struct nesting *thisloop = ALLOC_NESTING ();
2499
2500  /* Make an entry on loop_stack for the loop we are entering.  */
2501
2502  thisloop->desc = LOOP_NESTING;
2503  thisloop->next = loop_stack;
2504  thisloop->all = nesting_stack;
2505  thisloop->depth = ++nesting_depth;
2506  thisloop->data.loop.start_label = gen_label_rtx ();
2507  thisloop->data.loop.end_label = gen_label_rtx ();
2508  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2509  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2510  loop_stack = thisloop;
2511  nesting_stack = thisloop;
2512
2513  do_pending_stack_adjust ();
2514  emit_queue ();
2515  emit_note (NOTE_INSN_LOOP_BEG);
2516  emit_label (thisloop->data.loop.start_label);
2517
2518  return thisloop;
2519}
2520
2521/* Like expand_start_loop but for a loop where the continuation point
2522   (for expand_continue_loop) will be specified explicitly.  */
2523
2524struct nesting *
2525expand_start_loop_continue_elsewhere (int exit_flag)
2526{
2527  struct nesting *thisloop = expand_start_loop (exit_flag);
2528  loop_stack->data.loop.continue_label = gen_label_rtx ();
2529  return thisloop;
2530}
2531
2532/* Begin a null, aka do { } while (0) "loop".  But since the contents
2533   of said loop can still contain a break, we must frob the loop nest.  */
2534
2535struct nesting *
2536expand_start_null_loop (void)
2537{
2538  struct nesting *thisloop = ALLOC_NESTING ();
2539
2540  /* Make an entry on loop_stack for the loop we are entering.  */
2541
2542  thisloop->desc = LOOP_NESTING;
2543  thisloop->next = loop_stack;
2544  thisloop->all = nesting_stack;
2545  thisloop->depth = ++nesting_depth;
2546  thisloop->data.loop.start_label = emit_note (NOTE_INSN_DELETED);
2547  thisloop->data.loop.end_label = gen_label_rtx ();
2548  thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2549  thisloop->exit_label = thisloop->data.loop.end_label;
2550  loop_stack = thisloop;
2551  nesting_stack = thisloop;
2552
2553  return thisloop;
2554}
2555
2556/* Specify the continuation point for a loop started with
2557   expand_start_loop_continue_elsewhere.
2558   Use this at the point in the code to which a continue statement
2559   should jump.  */
2560
2561void
2562expand_loop_continue_here (void)
2563{
2564  do_pending_stack_adjust ();
2565  emit_note (NOTE_INSN_LOOP_CONT);
2566  emit_label (loop_stack->data.loop.continue_label);
2567}
2568
2569/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
2570   Pop the block off of loop_stack.  */
2571
2572void
2573expand_end_loop (void)
2574{
2575  rtx start_label = loop_stack->data.loop.start_label;
2576  rtx etc_note;
2577  int eh_regions, debug_blocks;
2578  bool empty_test;
2579
2580  /* Mark the continue-point at the top of the loop if none elsewhere.  */
2581  if (start_label == loop_stack->data.loop.continue_label)
2582    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2583
2584  do_pending_stack_adjust ();
2585
2586  /* If the loop starts with a loop exit, roll that to the end where
2587     it will optimize together with the jump back.
2588
2589     If the loop presently looks like this (in pseudo-C):
2590
2591	LOOP_BEG
2592	start_label:
2593	  if (test) goto end_label;
2594	LOOP_END_TOP_COND
2595	  body;
2596	  goto start_label;
2597	end_label:
2598
2599     transform it to look like:
2600
2601	LOOP_BEG
2602	  goto start_label;
2603	top_label:
2604	  body;
2605	start_label:
2606	  if (test) goto end_label;
2607	  goto top_label;
2608	end_label:
2609
2610     We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2611     the end of the entry conditional.  Without this, our lexical scan
2612     can't tell the difference between an entry conditional and a
2613     body conditional that exits the loop.  Mistaking the two means
2614     that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2615     screw up loop unrolling.
2616
2617     Things will be oh so much better when loop optimization is done
2618     off of a proper control flow graph...  */
2619
2620  /* Scan insns from the top of the loop looking for the END_TOP_COND note.  */
2621
2622  empty_test = true;
2623  eh_regions = debug_blocks = 0;
2624  for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2625    if (GET_CODE (etc_note) == NOTE)
2626      {
2627	if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2628	  break;
2629
2630	/* We must not walk into a nested loop.  */
2631	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2632	  {
2633	    etc_note = NULL_RTX;
2634	    break;
2635	  }
2636
2637	/* At the same time, scan for EH region notes, as we don't want
2638	   to scrog region nesting.  This shouldn't happen, but...  */
2639	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2640	  eh_regions++;
2641	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2642	  {
2643	    if (--eh_regions < 0)
2644	      /* We've come to the end of an EH region, but never saw the
2645		 beginning of that region.  That means that an EH region
2646		 begins before the top of the loop, and ends in the middle
2647		 of it.  The existence of such a situation violates a basic
2648		 assumption in this code, since that would imply that even
2649		 when EH_REGIONS is zero, we might move code out of an
2650		 exception region.  */
2651	      abort ();
2652	  }
2653
2654	/* Likewise for debug scopes.  In this case we'll either (1) move
2655	   all of the notes if they are properly nested or (2) leave the
2656	   notes alone and only rotate the loop at high optimization
2657	   levels when we expect to scrog debug info.  */
2658	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2659	  debug_blocks++;
2660	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2661	  debug_blocks--;
2662      }
2663    else if (INSN_P (etc_note))
2664      empty_test = false;
2665
2666  if (etc_note
2667      && optimize
2668      && ! empty_test
2669      && eh_regions == 0
2670      && (debug_blocks == 0 || optimize >= 2)
2671      && NEXT_INSN (etc_note) != NULL_RTX
2672      && ! any_condjump_p (get_last_insn ()))
2673    {
2674      /* We found one.  Move everything from START to ETC to the end
2675	 of the loop, and add a jump from the top of the loop.  */
2676      rtx top_label = gen_label_rtx ();
2677      rtx start_move = start_label;
2678
2679      /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2680	 then we want to move this note also.  */
2681      if (GET_CODE (PREV_INSN (start_move)) == NOTE
2682	  && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2683	start_move = PREV_INSN (start_move);
2684
2685      emit_label_before (top_label, start_move);
2686
2687      /* Actually move the insns.  If the debug scopes are nested, we
2688	 can move everything at once.  Otherwise we have to move them
2689	 one by one and squeeze out the block notes.  */
2690      if (debug_blocks == 0)
2691	reorder_insns (start_move, etc_note, get_last_insn ());
2692      else
2693	{
2694	  rtx insn, next_insn;
2695	  for (insn = start_move; insn; insn = next_insn)
2696	    {
2697	      /* Figure out which insn comes after this one.  We have
2698		 to do this before we move INSN.  */
2699	      next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2700
2701	      if (GET_CODE (insn) == NOTE
2702		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2703		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2704		continue;
2705
2706	      reorder_insns (insn, insn, get_last_insn ());
2707	    }
2708	}
2709
2710      /* Add the jump from the top of the loop.  */
2711      emit_jump_insn_before (gen_jump (start_label), top_label);
2712      emit_barrier_before (top_label);
2713      start_label = top_label;
2714    }
2715
2716  emit_jump (start_label);
2717  emit_note (NOTE_INSN_LOOP_END);
2718  emit_label (loop_stack->data.loop.end_label);
2719
2720  POPSTACK (loop_stack);
2721
2722  clear_last_expr ();
2723}
2724
2725/* Finish a null loop, aka do { } while (0).  */
2726
2727void
2728expand_end_null_loop (void)
2729{
2730  do_pending_stack_adjust ();
2731  emit_label (loop_stack->data.loop.end_label);
2732
2733  POPSTACK (loop_stack);
2734
2735  clear_last_expr ();
2736}
2737
2738/* Generate a jump to the current loop's continue-point.
2739   This is usually the top of the loop, but may be specified
2740   explicitly elsewhere.  If not currently inside a loop,
2741   return 0 and do nothing; caller will print an error message.  */
2742
2743int
2744expand_continue_loop (struct nesting *whichloop)
2745{
2746  /* Emit information for branch prediction.  */
2747  rtx note;
2748
2749  if (flag_guess_branch_prob)
2750    {
2751      note = emit_note (NOTE_INSN_PREDICTION);
2752      NOTE_PREDICTION (note) = NOTE_PREDICT (PRED_CONTINUE, IS_TAKEN);
2753    }
2754  clear_last_expr ();
2755  if (whichloop == 0)
2756    whichloop = loop_stack;
2757  if (whichloop == 0)
2758    return 0;
2759  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2760			NULL_RTX);
2761  return 1;
2762}
2763
2764/* Generate a jump to exit the current loop.  If not currently inside a loop,
2765   return 0 and do nothing; caller will print an error message.  */
2766
2767int
2768expand_exit_loop (struct nesting *whichloop)
2769{
2770  clear_last_expr ();
2771  if (whichloop == 0)
2772    whichloop = loop_stack;
2773  if (whichloop == 0)
2774    return 0;
2775  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2776  return 1;
2777}
2778
2779/* Generate a conditional jump to exit the current loop if COND
2780   evaluates to zero.  If not currently inside a loop,
2781   return 0 and do nothing; caller will print an error message.  */
2782
2783int
2784expand_exit_loop_if_false (struct nesting *whichloop, tree cond)
2785{
2786  rtx label;
2787  clear_last_expr ();
2788
2789  if (whichloop == 0)
2790    whichloop = loop_stack;
2791  if (whichloop == 0)
2792    return 0;
2793
2794  if (integer_nonzerop (cond))
2795    return 1;
2796  if (integer_zerop (cond))
2797    return expand_exit_loop (whichloop);
2798
2799  /* Check if we definitely won't need a fixup.  */
2800  if (whichloop == nesting_stack)
2801    {
2802      jumpifnot (cond, whichloop->data.loop.end_label);
2803      return 1;
2804    }
2805
2806  /* In order to handle fixups, we actually create a conditional jump
2807     around an unconditional branch to exit the loop.  If fixups are
2808     necessary, they go before the unconditional branch.  */
2809
2810  label = gen_label_rtx ();
2811  jumpif (cond, label);
2812  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2813			NULL_RTX);
2814  emit_label (label);
2815
2816  return 1;
2817}
2818
2819/* Like expand_exit_loop_if_false except also emit a note marking
2820   the end of the conditional.  Should only be used immediately
2821   after expand_start_loop.  */
2822
2823int
2824expand_exit_loop_top_cond (struct nesting *whichloop, tree cond)
2825{
2826  if (! expand_exit_loop_if_false (whichloop, cond))
2827    return 0;
2828
2829  emit_note (NOTE_INSN_LOOP_END_TOP_COND);
2830  return 1;
2831}
2832
2833/* Return nonzero if we should preserve sub-expressions as separate
2834   pseudos.  We never do so if we aren't optimizing.  We always do so
2835   if -fexpensive-optimizations.
2836
2837   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
2838   the loop may still be a small one.  */
2839
2840int
2841preserve_subexpressions_p (void)
2842{
2843  rtx insn;
2844
2845  if (flag_expensive_optimizations)
2846    return 1;
2847
2848  if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2849    return 0;
2850
2851  insn = get_last_insn_anywhere ();
2852
2853  return (insn
2854	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2855	      < n_non_fixed_regs * 3));
2856
2857}
2858
2859/* Generate a jump to exit the current loop, conditional, binding contour
2860   or case statement.  Not all such constructs are visible to this function,
2861   only those started with EXIT_FLAG nonzero.  Individual languages use
2862   the EXIT_FLAG parameter to control which kinds of constructs you can
2863   exit this way.
2864
2865   If not currently inside anything that can be exited,
2866   return 0 and do nothing; caller will print an error message.  */
2867
2868int
2869expand_exit_something (void)
2870{
2871  struct nesting *n;
2872  clear_last_expr ();
2873  for (n = nesting_stack; n; n = n->all)
2874    if (n->exit_label != 0)
2875      {
2876	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2877	return 1;
2878      }
2879
2880  return 0;
2881}
2882
2883/* Generate RTL to return from the current function, with no value.
2884   (That is, we do not do anything about returning any value.)  */
2885
2886void
2887expand_null_return (void)
2888{
2889  rtx last_insn;
2890
2891  last_insn = get_last_insn ();
2892
2893  /* If this function was declared to return a value, but we
2894     didn't, clobber the return registers so that they are not
2895     propagated live to the rest of the function.  */
2896  clobber_return_register ();
2897
2898  expand_null_return_1 (last_insn);
2899}
2900
2901/* Generate RTL to return directly from the current function.
2902   (That is, we bypass any return value.)  */
2903
2904void
2905expand_naked_return (void)
2906{
2907  rtx last_insn, end_label;
2908
2909  last_insn = get_last_insn ();
2910  end_label = naked_return_label;
2911
2912  clear_pending_stack_adjust ();
2913  do_pending_stack_adjust ();
2914  clear_last_expr ();
2915
2916  if (end_label == 0)
2917    end_label = naked_return_label = gen_label_rtx ();
2918  expand_goto_internal (NULL_TREE, end_label, last_insn);
2919}
2920
2921/* Try to guess whether the return value indicates an error code.  */
2922static enum br_predictor
2923return_prediction (rtx val)
2924{
2925  /* Different heuristics for pointers and scalars.  */
2926  if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2927    {
2928      /* NULL is usually not returned.  */
2929      if (val == const0_rtx)
2930	return PRED_NULL_RETURN;
2931    }
2932  else
2933    {
2934      /* Negative return values are often used to indicate
2935         errors.  */
2936      if (GET_CODE (val) == CONST_INT
2937	  && INTVAL (val) < 0)
2938	return PRED_NEGATIVE_RETURN;
2939      /* Constant return values also usually indicate errors;
2940         zero/one often mean booleans, so exclude them from the
2941	 heuristics.  */
2942      if (CONSTANT_P (val)
2943	  && (val != const0_rtx && val != const1_rtx))
2944	return PRED_CONST_RETURN;
2945    }
2946  return PRED_NO_PREDICTION;
2947}
2948
2949
2950/* If the current function returns values in the most significant part
2951   of a register, shift return value VAL appropriately.  The mode of
2952   the function's return type is known not to be BLKmode.  */
2953
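/* Worked example (illustrative): for a 3-byte return type whose value
   lands in a 32-bit register on a return-in-MSB target, the shift is
   32 - 8 * 3 = 8, moving the value into the 24 most significant
   bits.  */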
2954static rtx
2955shift_return_value (rtx val)
2956{
2957  tree type;
2958
2959  type = TREE_TYPE (DECL_RESULT (current_function_decl));
2960  if (targetm.calls.return_in_msb (type))
2961    {
2962      rtx target;
2963      HOST_WIDE_INT shift;
2964
2965      target = DECL_RTL (DECL_RESULT (current_function_decl));
2966      shift = (GET_MODE_BITSIZE (GET_MODE (target))
2967	       - BITS_PER_UNIT * int_size_in_bytes (type));
2968      if (shift > 0)
2969	val = expand_binop (GET_MODE (target), ashl_optab,
2970			    gen_lowpart (GET_MODE (target), val),
2971			    GEN_INT (shift), target, 1, OPTAB_WIDEN);
2972    }
2973  return val;
2974}
2975
2976
2977/* Generate RTL to return from the current function, with value VAL.  */
2978
2979static void
2980expand_value_return (rtx val)
2981{
2982  rtx last_insn;
2983  rtx return_reg;
2984  enum br_predictor pred;
2985
2986  if (flag_guess_branch_prob
2987      && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
2988    {
2989      /* Emit information for branch prediction.  */
2990      rtx note;
2991
2992      note = emit_note (NOTE_INSN_PREDICTION);
2993
2994      NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);
2995
2996    }
2997
2998  last_insn = get_last_insn ();
2999  return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
3000
3001  /* Copy the value to the return location
3002     unless it's already there.  */
3003
3004  if (return_reg != val)
3005    {
3006      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
3007      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
3008      {
3009	int unsignedp = TREE_UNSIGNED (type);
3010	enum machine_mode old_mode
3011	  = DECL_MODE (DECL_RESULT (current_function_decl));
3012	enum machine_mode mode
3013	  = promote_mode (type, old_mode, &unsignedp, 1);
3014
3015	if (mode != old_mode)
3016	  val = convert_modes (mode, old_mode, val, unsignedp);
3017      }
3018      if (GET_CODE (return_reg) == PARALLEL)
3019	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3020      else
3021	emit_move_insn (return_reg, val);
3022    }
3023
3024  expand_null_return_1 (last_insn);
3025}
3026
3027/* Output a return with no value.  If LAST_INSN is nonzero,
3028   pretend that the return takes place after LAST_INSN.  */
3029
3030static void
3031expand_null_return_1 (rtx last_insn)
3032{
3033  rtx end_label = cleanup_label ? cleanup_label : return_label;
3034
3035  clear_pending_stack_adjust ();
3036  do_pending_stack_adjust ();
3037  clear_last_expr ();
3038
3039  if (end_label == 0)
3040     end_label = return_label = gen_label_rtx ();
3041  expand_goto_internal (NULL_TREE, end_label, last_insn);
3042}
3043
3044/* Generate RTL to evaluate the expression RETVAL and return it
3045   from the current function.  */
3046
3047void
3048expand_return (tree retval)
3049{
3050  /* If there are any cleanups to be performed, then they will
3051     be inserted following LAST_INSN.  It is desirable
3052     that the last_insn, for such purposes, should be the
3053     last insn before computing the return value.  Otherwise, cleanups
3054     which call functions can clobber the return value.  */
3055  /* ??? rms: I think that is erroneous, because in C++ it would
3056     run destructors on variables that might be used in the subsequent
3057     computation of the return value.  */
3058  rtx last_insn = 0;
3059  rtx result_rtl;
3060  rtx val = 0;
3061  tree retval_rhs;
3062
3063  /* If function wants no value, give it none.  */
3064  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3065    {
3066      expand_expr (retval, NULL_RTX, VOIDmode, 0);
3067      emit_queue ();
3068      expand_null_return ();
3069      return;
3070    }
3071
3072  if (retval == error_mark_node)
3073    {
3074      /* Treat this like a return of no value from a function that
3075	 returns a value.  */
3076      expand_null_return ();
3077      return;
3078    }
3079  else if (TREE_CODE (retval) == RESULT_DECL)
3080    retval_rhs = retval;
3081  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3082	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3083    retval_rhs = TREE_OPERAND (retval, 1);
3084  else if (VOID_TYPE_P (TREE_TYPE (retval)))
3085    /* Recognize tail-recursive call to void function.  */
3086    retval_rhs = retval;
3087  else
3088    retval_rhs = NULL_TREE;
3089
3090  last_insn = get_last_insn ();
3091
3092  /* Distribute return down conditional expr if either of the sides
3093     may involve tail recursion (see test below).  This increases the number
3094     of tail recursions we detect.  Don't always do this, since it can
3095     produce sub-optimal code in some cases, and we already distribute
3096     assignments into conditional expressions when that would help.
3097
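  /* Illustrative example: `return p ? f (x) : g (x);' is rewritten
     below as two separate returns, each of which may then be
     optimized as a tail call on its own.  */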
3098  if (optimize && retval_rhs != 0
3099      && frame_offset == 0
3100      && TREE_CODE (retval_rhs) == COND_EXPR
3101      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3102	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3103    {
3104      rtx label = gen_label_rtx ();
3105      tree expr;
3106
3107      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3108      start_cleanup_deferral ();
3109      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3110		    DECL_RESULT (current_function_decl),
3111		    TREE_OPERAND (retval_rhs, 1));
3112      TREE_SIDE_EFFECTS (expr) = 1;
3113      expand_return (expr);
3114      emit_label (label);
3115
3116      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3117		    DECL_RESULT (current_function_decl),
3118		    TREE_OPERAND (retval_rhs, 2));
3119      TREE_SIDE_EFFECTS (expr) = 1;
3120      expand_return (expr);
3121      end_cleanup_deferral ();
3122      return;
3123    }
3124
3125  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3126
3127  /* If the result is an aggregate that is being returned in one (or more)
3128     registers, load the registers here.  The compiler currently can't handle
3129     copying a BLKmode value into registers.  We could put this code in a
3130     more general area (for use by everyone instead of just function
3131     call/return), but until this feature is generally usable it is kept here
3132     (and in expand_call).  The value must go into a pseudo in case there
3133     are cleanups that will clobber the real return register.  */
3134
3135  if (retval_rhs != 0
3136      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3137      && GET_CODE (result_rtl) == REG)
3138    {
3139      int i;
3140      unsigned HOST_WIDE_INT bitpos, xbitpos;
3141      unsigned HOST_WIDE_INT padding_correction = 0;
3142      unsigned HOST_WIDE_INT bytes
3143	= int_size_in_bytes (TREE_TYPE (retval_rhs));
3144      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3145      unsigned int bitsize
3146	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3147      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
3148      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3149      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3150      enum machine_mode tmpmode, result_reg_mode;
3151
3152      if (bytes == 0)
3153	{
3154	  expand_null_return ();
3155	  return;
3156	}
3157
3158      /* If the structure doesn't take up a whole number of words, see
3159	 whether the register value should be padded on the left or on
3160	 the right.  Set PADDING_CORRECTION to the number of padding
3161	 bits needed on the left side.
3162
3163	 In most ABIs, the structure will be returned at the least significant
3164	 end of the register, which translates to right padding on little-endian
3165	 targets and left padding on big-endian targets.  The opposite
3166	 holds if the structure is returned at the most significant
3167	 end of the register.  */
3168      if (bytes % UNITS_PER_WORD != 0
3169	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
3170	      ? !BYTES_BIG_ENDIAN
3171	      : BYTES_BIG_ENDIAN))
3172	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3173					       * BITS_PER_UNIT));
3174
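      /* Worked example (illustrative): on a 32-bit big-endian target
	 that does not return in the MSB, a 6-byte structure yields
	 padding_correction = 32 - 2 * 8 = 16 bits of left padding.  */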
3175      /* Copy the structure BITSIZE bits at a time.  */
3176      for (bitpos = 0, xbitpos = padding_correction;
3177	   bitpos < bytes * BITS_PER_UNIT;
3178	   bitpos += bitsize, xbitpos += bitsize)
3179	{
3180	  /* We need a new destination pseudo each time xbitpos is
3181	     on a word boundary and when xbitpos == padding_correction
3182	     (the first time through).  */
3183	  if (xbitpos % BITS_PER_WORD == 0
3184	      || xbitpos == padding_correction)
3185	    {
3186	      /* Generate an appropriate register.  */
3187	      dst = gen_reg_rtx (word_mode);
3188	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3189
3190	      /* Clear the destination before we move anything into it.  */
3191	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3192	    }
3193
3194	  /* We need a new source operand each time bitpos is on a word
3195	     boundary.  */
3196	  if (bitpos % BITS_PER_WORD == 0)
3197	    src = operand_subword_force (result_val,
3198					 bitpos / BITS_PER_WORD,
3199					 BLKmode);
3200
3201	  /* Use bitpos for the source extraction (left justified) and
3202	     xbitpos for the destination store (right justified).  */
3203	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3204			   extract_bit_field (src, bitsize,
3205					      bitpos % BITS_PER_WORD, 1,
3206					      NULL_RTX, word_mode, word_mode,
3207					      BITS_PER_WORD),
3208			   BITS_PER_WORD);
3209	}
3210
3211      tmpmode = GET_MODE (result_rtl);
3212      if (tmpmode == BLKmode)
3213	{
3214	  /* Find the smallest integer mode large enough to hold the
3215	     entire structure and use that mode instead of BLKmode
3216	     on the USE insn for the return register.  */
3217	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3218	       tmpmode != VOIDmode;
3219	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3220	    /* Have we found a large enough mode?  */
3221	    if (GET_MODE_SIZE (tmpmode) >= bytes)
3222	      break;
3223
3224	  /* No suitable mode found.  */
3225	  if (tmpmode == VOIDmode)
3226	    abort ();
3227
3228	  PUT_MODE (result_rtl, tmpmode);
3229	}
3230
3231      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3232	result_reg_mode = word_mode;
3233      else
3234	result_reg_mode = tmpmode;
3235      result_reg = gen_reg_rtx (result_reg_mode);
3236
3237      emit_queue ();
3238      for (i = 0; i < n_regs; i++)
3239	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3240			result_pseudos[i]);
3241
3242      if (tmpmode != result_reg_mode)
3243	result_reg = gen_lowpart (tmpmode, result_reg);
3244
3245      expand_value_return (result_reg);
3246    }
3247  else if (retval_rhs != 0
3248	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3249	   && (GET_CODE (result_rtl) == REG
3250	       || (GET_CODE (result_rtl) == PARALLEL)))
3251    {
3252      /* Calculate the return value into a temporary (usually a pseudo
3253         reg).  */
3254      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3255      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3256
3257      val = assign_temp (nt, 0, 0, 1);
3258      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3259      val = force_not_mem (val);
3260      emit_queue ();
3261      /* Return the calculated value, doing cleanups first.  */
3262      expand_value_return (shift_return_value (val));
3263    }
3264  else
3265    {
3266      /* No cleanups or no hard reg used;
3267	 calculate value into hard return reg.  */
3268      expand_expr (retval, const0_rtx, VOIDmode, 0);
3269      emit_queue ();
3270      expand_value_return (result_rtl);
3271    }
3272}
3273
3274/* Attempt to optimize a potential tail recursion call into a goto.
3275   ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3276   where to place the jump to the tail recursion label.
3277
3278   Return TRUE if the call was optimized into a goto.  */
3279
3280int
3281optimize_tail_recursion (tree arguments, rtx last_insn)
3282{
3283  /* Finish checking validity, and if valid emit code to set the
3284     argument variables for the new call.  */
3285  if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3286    {
3287      if (tail_recursion_label == 0)
3288	{
3289	  tail_recursion_label = gen_label_rtx ();
3290	  emit_label_after (tail_recursion_label,
3291			    tail_recursion_reentry);
3292	}
3293      emit_queue ();
3294      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3295      emit_barrier ();
3296      return 1;
3297    }
3298  return 0;
3299}
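
/* For example, in

     int sum (int n, int acc)
     {
       if (n == 0)
	 return acc;
       return sum (n - 1, acc + n);
     }

   the recursive call satisfies tail_recursion_args, so it can be
   replaced by assignments to N and ACC followed by a jump back to
   tail_recursion_label, turning the recursion into a loop.  */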
3300
3301/* Emit code to alter this function's formal parms for a tail-recursive call.
3302   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3303   FORMALS is the chain of decls of formals.
3304   Return 1 if this can be done;
3305   otherwise return 0 and do not emit any code.  */
3306
3307static int
3308tail_recursion_args (tree actuals, tree formals)
3309{
3310  tree a = actuals, f = formals;
3311  int i;
3312  rtx *argvec;
3313
3314  /* Check that number and types of actuals are compatible
3315     with the formals.  This is not always true in valid C code.
3316     Also check that no formal needs to be addressable
3317     and that all formals are scalars.  */
3318
3319  /* Also count the args.  */
3320
3321  for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3322    {
3323      if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3324	  != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3325	return 0;
3326      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3327	return 0;
3328    }
3329  if (a != 0 || f != 0)
3330    return 0;
3331
3332  /* Compute all the actuals.  */
3333
3334  argvec = alloca (i * sizeof (rtx));
3335
3336  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3337    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3338
3339  /* Find which actual values refer to current values of previous formals.
3340     Copy each of them now, before any formal is changed.  */
3341
3342  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3343    {
3344      int copy = 0;
3345      int j;
3346      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3347	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3348	  {
3349	    copy = 1;
3350	    break;
3351	  }
3352      if (copy)
3353	argvec[i] = copy_to_reg (argvec[i]);
3354    }
3355
3356  /* Store the values of the actuals into the formals.  */
3357
3358  for (f = formals, a = actuals, i = 0; f;
3359       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3360    {
3361      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3362	emit_move_insn (DECL_RTL (f), argvec[i]);
3363      else
3364	{
3365	  rtx tmp = argvec[i];
3366	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a)));
3367	  promote_mode (TREE_TYPE (TREE_VALUE (a)), GET_MODE (tmp),
3368			&unsignedp, 0);
3369	  if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3370	    {
3371	      tmp = gen_reg_rtx (DECL_MODE (f));
3372	      convert_move (tmp, argvec[i], unsignedp);
3373	    }
3374	  convert_move (DECL_RTL (f), tmp, unsignedp);
3375	}
3376    }
3377
3378  free_temp_slots ();
3379  return 1;
3380}
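
/* The copying pass above implements parallel-assignment semantics.
   For example, for a tail call "return f (y, x);" within f (x, y),
   storing Y into the formal X before X's old value has been read for
   the second argument would corrupt that argument, so any actual that
   mentions an earlier formal is first copied to a fresh pseudo.  */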
3381
3382/* Generate the RTL code for entering a binding contour.
3383   The variables are declared one by one, by calls to `expand_decl'.
3384
3385   FLAGS is a bitwise or of the following flags:
3386
3387     1 - Nonzero if this construct should be visible to
3388         `exit_something'.
3389
3390     2 - Nonzero if this contour does not require a
3391	 NOTE_INSN_BLOCK_BEG note.  Virtually all calls from
3392	 language-independent code should set this flag because they
3393	 will not create corresponding BLOCK nodes.  (There should be
3394	 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3395	 and BLOCKs.)  If this flag is set, MARK_ENDS should be zero
3396	 when expand_end_bindings is called.
3397
3398    If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3399    optionally be supplied.  If so, it becomes the NOTE_BLOCK for the
3400    note.  */
3401
3402void
3403expand_start_bindings_and_block (int flags, tree block)
3404{
3405  struct nesting *thisblock = ALLOC_NESTING ();
3406  rtx note;
3407  int exit_flag = ((flags & 1) != 0);
3408  int block_flag = ((flags & 2) == 0);
3409
3410  /* If a BLOCK is supplied, then the caller should be requesting a
3411     NOTE_INSN_BLOCK_BEG note.  */
3412  if (!block_flag && block)
3413    abort ();
3414
3415  /* Create a note to mark the beginning of the block.  */
3416  if (block_flag)
3417    {
3418      note = emit_note (NOTE_INSN_BLOCK_BEG);
3419      NOTE_BLOCK (note) = block;
3420    }
3421  else
3422    note = emit_note (NOTE_INSN_DELETED);
3423
3424  /* Make an entry on block_stack for the block we are entering.  */
3425
3426  thisblock->desc = BLOCK_NESTING;
3427  thisblock->next = block_stack;
3428  thisblock->all = nesting_stack;
3429  thisblock->depth = ++nesting_depth;
3430  thisblock->data.block.stack_level = 0;
3431  thisblock->data.block.cleanups = 0;
3432  thisblock->data.block.exception_region = 0;
3433  thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3434
3435  thisblock->data.block.conditional_code = 0;
3436  thisblock->data.block.last_unconditional_cleanup = note;
3437  /* When we insert instructions after the last unconditional cleanup,
3438     we don't adjust last_insn.  That means that a later add_insn will
3439     clobber the instructions we've just added.  The easiest way to
3440     fix this is to just insert another instruction here, so that the
3441     instructions inserted after the last unconditional cleanup are
3442     never the last instruction.  */
3443  emit_note (NOTE_INSN_DELETED);
3444
3445  if (block_stack
3446      && !(block_stack->data.block.cleanups == NULL_TREE
3447	   && block_stack->data.block.outer_cleanups == NULL_TREE))
3448    thisblock->data.block.outer_cleanups
3449      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3450		   block_stack->data.block.outer_cleanups);
3451  else
3452    thisblock->data.block.outer_cleanups = 0;
3453  thisblock->data.block.label_chain = 0;
3454  thisblock->data.block.innermost_stack_block = stack_block_stack;
3455  thisblock->data.block.first_insn = note;
3456  thisblock->data.block.block_start_count = ++current_block_start_count;
3457  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3458  block_stack = thisblock;
3459  nesting_stack = thisblock;
3460
3461  /* Make a new level for allocating stack slots.  */
3462  push_temp_slots ();
3463}
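
/* For example, a front end expanding a braced body "{ int i; ... }"
   that has its own BLOCK node typically pairs

     expand_start_bindings_and_block (0, block);
     ...
     expand_end_bindings (vars, 1, 0);

   whereas language-independent code calls expand_start_bindings (2)
   and later expand_end_bindings (NULL_TREE, 0, 0), creating no block
   notes.  */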
3464
3465/* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
3466   to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3467	   expand_expr is made.  After we end the region, we know that all
3468   space for all temporaries that were created by TARGET_EXPRs will be
3469   destroyed and their space freed for reuse.  */
3470
3471void
3472expand_start_target_temps (void)
3473{
3474  /* This is so that even if the result is preserved, the space
3475     allocated will be freed, as we know that it is no longer in use.  */
3476  push_temp_slots ();
3477
3478  /* Start a new binding layer that will keep track of all cleanup
3479     actions to be performed.  */
3480  expand_start_bindings (2);
3481
3482  target_temp_slot_level = temp_slot_level;
3483}
3484
3485void
3486expand_end_target_temps (void)
3487{
3488  expand_end_bindings (NULL_TREE, 0, 0);
3489
3490  /* This is so that even if the result is preserved, the space
3491     allocated will be freed, as we know that it is no longer in use.  */
3492  pop_temp_slots ();
3493}
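
/* For example, a front end may bracket the expansion of a full
   expression statement this way:

     expand_start_target_temps ();
     expand_expr_stmt (exp);
     expand_end_target_temps ();

   so that temporaries created by TARGET_EXPRs within EXP are freed
   when the statement ends.  */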
3494
3495/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
3496   in question represents the outermost pair of curly braces (i.e. the "body
3497   block") of a function or method.
3498
3499   For any BLOCK node representing a "body block" of a function or method, the
3500   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3501   represents the outermost (function) scope for the function or method (i.e.
3502   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
3503   *that* node in turn will point to the relevant FUNCTION_DECL node.  */
3504
3505int
3506is_body_block (tree stmt)
3507{
3508  if (lang_hooks.no_body_blocks)
3509    return 0;
3510
3511  if (TREE_CODE (stmt) == BLOCK)
3512    {
3513      tree parent = BLOCK_SUPERCONTEXT (stmt);
3514
3515      if (parent && TREE_CODE (parent) == BLOCK)
3516	{
3517	  tree grandparent = BLOCK_SUPERCONTEXT (parent);
3518
3519	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3520	    return 1;
3521	}
3522    }
3523
3524  return 0;
3525}
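
/* For example, given

     int f (int x) { int y; ... }

   the body block is the BLOCK for "{ int y; ... }": its
   BLOCK_SUPERCONTEXT is the BLOCK for the outermost scope holding the
   parameter X, whose BLOCK_SUPERCONTEXT in turn is the FUNCTION_DECL
   for f, so is_body_block returns nonzero for it.  */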
3526
3527/* True if we are currently emitting insns in an area of output code
3528   that is controlled by a conditional expression.  This is used by
3529   the cleanup handling code to generate conditional cleanup actions.  */
3530
3531int
3532conditional_context (void)
3533{
3534  return block_stack && block_stack->data.block.conditional_code;
3535}
3536
3537/* Return an opaque pointer to the current nesting level, so frontend code
3538   can check its own sanity.  */
3539
3540struct nesting *
3541current_nesting_level (void)
3542{
3543  return cfun ? block_stack : 0;
3544}
3545
3546/* Emit a handler label for a nonlocal goto handler.
3547   Also emit code to store the handler label in SLOT before BEFORE_INSN.  */
3548
3549static rtx
3550expand_nl_handler_label (rtx slot, rtx before_insn)
3551{
3552  rtx insns;
3553  rtx handler_label = gen_label_rtx ();
3554
3555  /* Don't let cleanup_cfg delete the handler.  */
3556  LABEL_PRESERVE_P (handler_label) = 1;
3557
3558  start_sequence ();
3559  emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3560  insns = get_insns ();
3561  end_sequence ();
3562  emit_insn_before (insns, before_insn);
3563
3564  emit_label (handler_label);
3565
3566  return handler_label;
3567}
3568
3569/* Emit code to restore vital registers at the beginning of a nonlocal goto
3570   handler.  */
3571static void
3572expand_nl_goto_receiver (void)
3573{
3574	  /* Clobber the FP when we get here, so we have to make sure it's
3575     marked as used by this function.  */
3576  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
3577
3578  /* Mark the static chain as clobbered here so life information
3579     doesn't get messed up for it.  */
3580  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
3581
3582#ifdef HAVE_nonlocal_goto
3583  if (! HAVE_nonlocal_goto)
3584#endif
3585    /* First adjust our frame pointer to its actual value.  It was
3586       previously set to the start of the virtual area corresponding to
3587       the stacked variables when we branched here and now needs to be
3588       adjusted to the actual hardware fp value.
3589
3590	       Assignments to virtual registers are converted by
3591       instantiate_virtual_regs into the corresponding assignment
3592       to the underlying register (fp in this case) that makes
3593       the original assignment true.
3594       So the following insn will actually be
3595       decrementing fp by STARTING_FRAME_OFFSET.  */
3596    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
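
  /* For example, if STARTING_FRAME_OFFSET is 16, the move above says
     "fp + 16 = fp", which instantiate_virtual_regs makes true by
     rewriting it as "fp = fp - 16".  */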
3597
3598#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3599  if (fixed_regs[ARG_POINTER_REGNUM])
3600    {
3601#ifdef ELIMINABLE_REGS
3602      /* If the argument pointer can be eliminated in favor of the
3603	 frame pointer, we don't need to restore it.  We assume here
3604	 that if such an elimination is present, it can always be used.
3605	 This is the case on all known machines; if we don't make this
3606	 assumption, we do unnecessary saving on many machines.  */
3607      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3608      size_t i;
3609
3610      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3611	if (elim_regs[i].from == ARG_POINTER_REGNUM
3612	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3613	  break;
3614
3615      if (i == ARRAY_SIZE (elim_regs))
3616#endif
3617	{
3618	  /* Now restore our arg pointer from the address at which it
3619	     was saved in our stack frame.  */
3620	  emit_move_insn (virtual_incoming_args_rtx,
3621			  copy_to_reg (get_arg_pointer_save_area (cfun)));
3622	}
3623    }
3624#endif
3625
3626#ifdef HAVE_nonlocal_goto_receiver
3627  if (HAVE_nonlocal_goto_receiver)
3628    emit_insn (gen_nonlocal_goto_receiver ());
3629#endif
3630
3631  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
3632     insn, but we must not allow the code we just generated to be reordered
3633     by scheduling.  Specifically, the update of the frame pointer must
3634	     happen immediately, not later.  So emit an ASM_INPUT to act as a
3635	     blockage insn.  */
3636  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
3637}
3638
3639/* Make handlers for nonlocal gotos taking place in the function calls in
3640   block THISBLOCK.  */
3641
3642static void
3643expand_nl_goto_receivers (struct nesting *thisblock)
3644{
3645  tree link;
3646  rtx afterward = gen_label_rtx ();
3647  rtx insns, slot;
3648  rtx label_list;
3649  int any_invalid;
3650
3651  /* Record the handler address in the stack slot for that purpose,
3652     during this block, saving and restoring the outer value.  */
3653  if (thisblock->next != 0)
3654    for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3655      {
3656	rtx save_receiver = gen_reg_rtx (Pmode);
3657	emit_move_insn (XEXP (slot, 0), save_receiver);
3658
3659	start_sequence ();
3660	emit_move_insn (save_receiver, XEXP (slot, 0));
3661	insns = get_insns ();
3662	end_sequence ();
3663	emit_insn_before (insns, thisblock->data.block.first_insn);
3664      }
3665
3666  /* Jump around the handlers; they run only when specially invoked.  */
3667  emit_jump (afterward);
3668
3669  /* Make a separate handler for each label.  */
3670  link = nonlocal_labels;
3671  slot = nonlocal_goto_handler_slots;
3672  label_list = NULL_RTX;
3673  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3674	    /* Skip any labels we shouldn't be able to jump to from here;
3675	       we generate one special handler for all of them below, which
3676	       just traps.  */
3677    if (! DECL_TOO_LATE (TREE_VALUE (link)))
3678      {
3679	rtx lab;
3680	lab = expand_nl_handler_label (XEXP (slot, 0),
3681				       thisblock->data.block.first_insn);
3682	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3683
3684	expand_nl_goto_receiver ();
3685
3686	/* Jump to the "real" nonlocal label.  */
3687	expand_goto (TREE_VALUE (link));
3688      }
3689
3690  /* A second pass over all nonlocal labels; this time we handle those
3691     we should not be able to jump to at this point.  */
3692  link = nonlocal_labels;
3693  slot = nonlocal_goto_handler_slots;
3694  any_invalid = 0;
3695  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3696    if (DECL_TOO_LATE (TREE_VALUE (link)))
3697      {
3698	rtx lab;
3699	lab = expand_nl_handler_label (XEXP (slot, 0),
3700				       thisblock->data.block.first_insn);
3701	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3702	any_invalid = 1;
3703      }
3704
3705  if (any_invalid)
3706    {
3707      expand_nl_goto_receiver ();
3708      expand_builtin_trap ();
3709    }
3710
3711  nonlocal_goto_handler_labels = label_list;
3712  emit_label (afterward);
3713}
3714
3715/* Warn about any unused VARS (which may contain nodes other than
3716   VAR_DECLs, but such nodes are ignored).  The nodes are connected
3717   via the TREE_CHAIN field.  */
3718
3719void
3720warn_about_unused_variables (tree vars)
3721{
3722  tree decl;
3723
3724  if (warn_unused_variable)
3725    for (decl = vars; decl; decl = TREE_CHAIN (decl))
3726      if (TREE_CODE (decl) == VAR_DECL
3727	  && ! TREE_USED (decl)
3728	  && ! DECL_IN_SYSTEM_HEADER (decl)
3729	  && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3730	warning ("%Junused variable '%D'", decl, decl);
3731}
3732
3733/* Generate RTL code to terminate a binding contour.
3734
3735   VARS is the chain of VAR_DECL nodes for the variables bound in this
3736   contour.  There may actually be other nodes in this chain, but any
3737   nodes other than VAR_DECLS are ignored.
3738
3739   MARK_ENDS is nonzero if we should put a note at the beginning
3740   and end of this binding contour.
3741
3742   DONT_JUMP_IN is positive if it is not valid to jump into this contour,
3743   zero if we can jump into this contour only if it does not have a saved
3744   stack level, and negative if we are not to check for invalid use of
3745   labels (because the front end does that).  */
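
/* For example, a goto from outside into

     { char buf[n]; lab: ...; }

   would bypass the dynamic allocation of BUF, so when this contour has
   a saved stack level its labels are marked DECL_TOO_LATE and a later
   goto to LAB draws an error.  */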
3746
3747void
3748expand_end_bindings (tree vars, int mark_ends, int dont_jump_in)
3749{
3750  struct nesting *thisblock = block_stack;
3751
3752  /* If any of the variables in this scope were not used, warn the
3753     user.  */
3754  warn_about_unused_variables (vars);
3755
3756  if (thisblock->exit_label)
3757    {
3758      do_pending_stack_adjust ();
3759      emit_label (thisblock->exit_label);
3760    }
3761
3762  /* If necessary, make handlers for nonlocal gotos taking
3763     place in the function calls in this block.  */
3764  if (function_call_count != 0 && nonlocal_labels
3765      /* Make handler for outermost block
3766	 if there were any nonlocal gotos to this function.  */
3767      && (thisblock->next == 0 ? current_function_has_nonlocal_label
3768	  /* Make handler for inner block if it has something
3769	     special to do when you jump out of it.  */
3770	  : (thisblock->data.block.cleanups != 0
3771	     || thisblock->data.block.stack_level != 0)))
3772    expand_nl_goto_receivers (thisblock);
3773
3774  /* Don't allow jumping into a block that has a stack level.
3775     Cleanups are allowed, though.  */
3776  if (dont_jump_in > 0
3777      || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
3778    {
3779      struct label_chain *chain;
3780
3781      /* Any labels in this block are no longer valid to go to.
3782	 Mark them to cause an error message.  */
3783      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3784	{
3785	  DECL_TOO_LATE (chain->label) = 1;
3786	  /* If any goto without a fixup came to this label,
3787	     that must be an error, because gotos without fixups
3788	     come from outside all saved stack-levels.  */
3789	  if (TREE_ADDRESSABLE (chain->label))
3790	    error ("%Jlabel '%D' used before containing binding contour",
3791		   chain->label, chain->label);
3792	}
3793    }
3794
3795  /* Restore stack level in effect before the block
3796     (only if variable-size objects allocated).  */
3797  /* Perform any cleanups associated with the block.  */
3798
3799  if (thisblock->data.block.stack_level != 0
3800      || thisblock->data.block.cleanups != 0)
3801    {
3802      int reachable;
3803      rtx insn;
3804
3805      /* Don't let cleanups affect ({...}) constructs.  */
3806      int old_expr_stmts_for_value = expr_stmts_for_value;
3807      rtx old_last_expr_value = last_expr_value;
3808      rtx old_last_expr_alt_rtl = last_expr_alt_rtl;
3809      tree old_last_expr_type = last_expr_type;
3810      expr_stmts_for_value = 0;
3811
3812      /* Only clean up here if this point can actually be reached.  */
3813      insn = get_last_insn ();
3814      if (GET_CODE (insn) == NOTE)
3815	insn = prev_nonnote_insn (insn);
3816      reachable = (! insn || GET_CODE (insn) != BARRIER);
3817
3818      /* Do the cleanups.  */
3819      expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
3820      if (reachable)
3821	do_pending_stack_adjust ();
3822
3823      expr_stmts_for_value = old_expr_stmts_for_value;
3824      last_expr_value = old_last_expr_value;
3825      last_expr_alt_rtl = old_last_expr_alt_rtl;
3826      last_expr_type = old_last_expr_type;
3827
3828      /* Restore the stack level.  */
3829
3830      if (reachable && thisblock->data.block.stack_level != 0)
3831	{
3832	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3833			      thisblock->data.block.stack_level, NULL_RTX);
3834	  if (nonlocal_goto_handler_slots != 0)
3835	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3836			     NULL_RTX);
3837	}
3838
3839      /* Any gotos out of this block must also do these things.
3840	 Also report any gotos with fixups that came to labels in this
3841	 level.  */
3842      fixup_gotos (thisblock,
3843		   thisblock->data.block.stack_level,
3844		   thisblock->data.block.cleanups,
3845		   thisblock->data.block.first_insn,
3846		   dont_jump_in);
3847    }
3848
3849  /* Mark the beginning and end of the scope if requested.
3850     We do this now, after running cleanups on the variables
3851     just going out of scope, so they are in scope for their cleanups.  */
3852
3853  if (mark_ends)
3854    {
3855      rtx note = emit_note (NOTE_INSN_BLOCK_END);
3856      NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3857    }
3858  else
3859    /* Get rid of the beginning-mark if we don't make an end-mark.  */
3860    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3861
3862  /* Restore the temporary level of TARGET_EXPRs.  */
3863  target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3864
3865  /* Restore block_stack level for containing block.  */
3866
3867  stack_block_stack = thisblock->data.block.innermost_stack_block;
3868  POPSTACK (block_stack);
3869
3870  /* Pop the stack slot nesting and free any slots at this level.  */
3871  pop_temp_slots ();
3872}
3873
3874/* Generate code to save the stack pointer at the start of the current block
3875   and set up to restore it on exit.  */
3876
3877void
3878save_stack_pointer (void)
3879{
3880  struct nesting *thisblock = block_stack;
3881
3882  if (thisblock->data.block.stack_level == 0)
3883    {
3884      emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3885		       &thisblock->data.block.stack_level,
3886		       thisblock->data.block.first_insn);
3887      stack_block_stack = thisblock;
3888    }
3889}
3890
3891/* Generate RTL for the automatic variable declaration DECL.
3892   (Other kinds of declarations are simply ignored if seen here.)  */
3893
3894void
3895expand_decl (tree decl)
3896{
3897  tree type;
3898
3899  type = TREE_TYPE (decl);
3900
3901  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3902     type in case this node is used in a reference.  */
3903  if (TREE_CODE (decl) == CONST_DECL)
3904    {
3905      DECL_MODE (decl) = TYPE_MODE (type);
3906      DECL_ALIGN (decl) = TYPE_ALIGN (type);
3907      DECL_SIZE (decl) = TYPE_SIZE (type);
3908      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3909      return;
3910    }
3911
3912  /* Otherwise, only automatic variables need any expansion done.  Static and
3913     external variables, and external functions, will be handled by
3914     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
3915     nothing.  PARM_DECLs are handled in `assign_parms'.  */
3916  if (TREE_CODE (decl) != VAR_DECL)
3917    return;
3918
3919  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3920    return;
3921
3922  /* Create the RTL representation for the variable.  */
3923
3924  if (type == error_mark_node)
3925    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3926
3927  else if (DECL_SIZE (decl) == 0)
3928    /* Variable with incomplete type.  */
3929    {
3930      rtx x;
3931      if (DECL_INITIAL (decl) == 0)
3932	/* Error message was already done; now avoid a crash.  */
3933	x = gen_rtx_MEM (BLKmode, const0_rtx);
3934      else
3935	/* An initializer is going to decide the size of this array.
3936	   Until we know the size, represent its address with a reg.  */
3937	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3938
3939      set_mem_attributes (x, decl, 1);
3940      SET_DECL_RTL (decl, x);
3941    }
3942  else if (DECL_MODE (decl) != BLKmode
3943	   /* If -ffloat-store, don't put explicit float vars
3944	      into regs.  */
3945	   && !(flag_float_store
3946		&& TREE_CODE (type) == REAL_TYPE)
3947	   && ! TREE_THIS_VOLATILE (decl)
3948	   && ! DECL_NONLOCAL (decl)
3949	   && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
3950    {
3951      /* Automatic variable that can go in a register.  */
3952      int unsignedp = TREE_UNSIGNED (type);
3953      enum machine_mode reg_mode
3954	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3955
3956      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3957
3958      if (!DECL_ARTIFICIAL (decl))
3959	mark_user_reg (DECL_RTL (decl));
3960
3961      if (POINTER_TYPE_P (type))
3962	mark_reg_pointer (DECL_RTL (decl),
3963			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3964
3965      maybe_set_unchanging (DECL_RTL (decl), decl);
3966
3967      /* If something wants our address, try to use ADDRESSOF.  */
3968      if (TREE_ADDRESSABLE (decl))
3969	put_var_into_stack (decl, /*rescan=*/false);
3970    }
3971
3972  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3973	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3974		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3975					  STACK_CHECK_MAX_VAR_SIZE)))
3976    {
3977      /* Variable of fixed size that goes on the stack.  */
3978      rtx oldaddr = 0;
3979      rtx addr;
3980      rtx x;
3981
3982      /* If we previously made RTL for this decl, it must be an array
3983	 whose size was determined by the initializer.
3984	 The old address was a register; set that register now
3985	 to the proper address.  */
3986      if (DECL_RTL_SET_P (decl))
3987	{
3988	  if (GET_CODE (DECL_RTL (decl)) != MEM
3989	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3990	    abort ();
3991	  oldaddr = XEXP (DECL_RTL (decl), 0);
3992	}
3993
3994      /* Set alignment we actually gave this decl.  */
3995      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3996			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
3997      DECL_USER_ALIGN (decl) = 0;
3998
3999      x = assign_temp (decl, 1, 1, 1);
4000      set_mem_attributes (x, decl, 1);
4001      SET_DECL_RTL (decl, x);
4002
4003      if (oldaddr)
4004	{
4005	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4006	  if (addr != oldaddr)
4007	    emit_move_insn (oldaddr, addr);
4008	}
4009    }
4010  else
4011    /* Dynamic-size object: must push space on the stack.  */
4012    {
4013      rtx address, size, x;
4014
4015	      /* Record the stack pointer on entry to the block, if we have
4016		 not already done so.  */
4017      do_pending_stack_adjust ();
4018      save_stack_pointer ();
4019
4020      /* In function-at-a-time mode, variable_size doesn't expand this,
4021	 so do it now.  */
4022      if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4023	expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4024		     const0_rtx, VOIDmode, 0);
4025
4026      /* Compute the variable's size, in bytes.  */
4027      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4028      free_temp_slots ();
4029
4030      /* Allocate space on the stack for the variable.  Note that
4031	 DECL_ALIGN says how the variable is to be aligned and we
4032	 cannot use it to conclude anything about the alignment of
4033	 the size.  */
4034      address = allocate_dynamic_stack_space (size, NULL_RTX,
4035					      TYPE_ALIGN (TREE_TYPE (decl)));
4036
4037	      /* Reference the variable indirectly through that rtx.  */
4038      x = gen_rtx_MEM (DECL_MODE (decl), address);
4039      set_mem_attributes (x, decl, 1);
4040      SET_DECL_RTL (decl, x);
4041
4043      /* Indicate the alignment we actually gave this variable.  */
4044#ifdef STACK_BOUNDARY
4045      DECL_ALIGN (decl) = STACK_BOUNDARY;
4046#else
4047      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4048#endif
4049      DECL_USER_ALIGN (decl) = 0;
4050    }
4051}
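
/* For example, in

     void f (int n)
     {
       int a;
       char buf[16];
       char vla[n];
     }

   A (when optimizing) becomes a pseudo register in the mode chosen by
   promote_mode, BUF gets a fixed stack slot from assign_temp, and VLA
   takes the dynamic path through allocate_dynamic_stack_space.  */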
4052
4053/* Emit code to perform the initialization of a declaration DECL.  */
4054
4055void
4056expand_decl_init (tree decl)
4057{
4058  int was_used = TREE_USED (decl);
4059
4060  /* If this is a CONST_DECL, we don't have to generate any code.  Likewise
4061     for static decls.  */
4062  if (TREE_CODE (decl) == CONST_DECL
4063      || TREE_STATIC (decl))
4064    return;
4065
4066  /* Compute and store the initial value now.  */
4067
4068  push_temp_slots ();
4069
4070  if (DECL_INITIAL (decl) == error_mark_node)
4071    {
4072      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4073
4074      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4075	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
4076	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4077			   0);
4078      emit_queue ();
4079    }
4080  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4081    {
4082      emit_line_note (DECL_SOURCE_LOCATION (decl));
4083      expand_assignment (decl, DECL_INITIAL (decl), 0);
4084      emit_queue ();
4085    }
4086
4087  /* Don't let the initialization count as "using" the variable.  */
4088  TREE_USED (decl) = was_used;
4089
4090  /* Free any temporaries we made while initializing the decl.  */
4091  preserve_temp_slots (NULL_RTX);
4092  free_temp_slots ();
4093  pop_temp_slots ();
4094}
4095
4096/* CLEANUP is an expression to be executed at exit from this binding contour;
4097   for example, in C++, it might call the destructor for this variable.
4098
4099   We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4100   CLEANUP multiple times, and have the correct semantics.  This
4101	   happens in exception handling and for gotos, returns, and breaks that
4102   leave the current scope.
4103
4104   If CLEANUP is nonzero and DECL is zero, we record a cleanup
4105   that is not associated with any particular variable.  */
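
/* For example, for a C++ block-scope declaration "T x;" the front end
   registers the destructor call for X as the CLEANUP, so that it runs
   on every exit from the contour: falling off the end, goto, return,
   break, or an exception.  */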
4106
4107int
4108expand_decl_cleanup (tree decl, tree cleanup)
4109{
4110  struct nesting *thisblock;
4111
4112  /* Error if we are not in any block.  */
4113  if (cfun == 0 || block_stack == 0)
4114    return 0;
4115
4116  thisblock = block_stack;
4117
4118  /* Record the cleanup if there is one.  */
4119
4120  if (cleanup != 0)
4121    {
4122      tree t;
4123      rtx seq;
4124      tree *cleanups = &thisblock->data.block.cleanups;
4125      int cond_context = conditional_context ();
4126
4127      if (cond_context)
4128	{
4129	  rtx flag = gen_reg_rtx (word_mode);
4130	  rtx set_flag_0;
4131	  tree cond;
4132
4133	  start_sequence ();
4134	  emit_move_insn (flag, const0_rtx);
4135	  set_flag_0 = get_insns ();
4136	  end_sequence ();
4137
4138	  thisblock->data.block.last_unconditional_cleanup
4139	    = emit_insn_after (set_flag_0,
4140				thisblock->data.block.last_unconditional_cleanup);
4141
4142	  emit_move_insn (flag, const1_rtx);
4143
4144	  cond = build_decl (VAR_DECL, NULL_TREE,
4145			     (*lang_hooks.types.type_for_mode) (word_mode, 1));
4146	  SET_DECL_RTL (cond, flag);
4147
4148	  /* Conditionalize the cleanup.  */
4149	  cleanup = build (COND_EXPR, void_type_node,
4150			   (*lang_hooks.truthvalue_conversion) (cond),
4151			   cleanup, integer_zero_node);
4152	  cleanup = fold (cleanup);
4153
4154	  cleanups = &thisblock->data.block.cleanups;
4155	}
4156
4157      cleanup = unsave_expr (cleanup);
4158
4159      t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4160
4161      if (! cond_context)
4162	/* If this block has a cleanup, it belongs in stack_block_stack.  */
4163	stack_block_stack = thisblock;
4164
4165      if (cond_context)
4166	{
4167	  start_sequence ();
4168	}
4169
4170      if (! using_eh_for_cleanups_p)
4171	TREE_ADDRESSABLE (t) = 1;
4172      else
4173	expand_eh_region_start ();
4174
4175      if (cond_context)
4176	{
4177	  seq = get_insns ();
4178	  end_sequence ();
4179	  if (seq)
4180	    thisblock->data.block.last_unconditional_cleanup
4181	      = emit_insn_after (seq,
4182				 thisblock->data.block.last_unconditional_cleanup);
4183	}
4184      else
4185	{
4186	  thisblock->data.block.last_unconditional_cleanup
4187	    = get_last_insn ();
4188	  /* When we insert instructions after the last unconditional cleanup,
4189	     we don't adjust last_insn.  That means that a later add_insn will
4190	     clobber the instructions we've just added.  The easiest way to
4191	     fix this is to just insert another instruction here, so that the
4192	     instructions inserted after the last unconditional cleanup are
4193	     never the last instruction.  */
4194	  emit_note (NOTE_INSN_DELETED);
4195	}
4196    }
4197  return 1;
4198}
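
/* With the conditionalization above, a cleanup registered inside a
   conditional context behaves as if it were

     if (flag) cleanup;

   where FLAG is cleared on the last unconditional path and set to 1
   only where the cleanup was actually registered, so exits that never
   reached that point do not run it.  */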
4199
4200/* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4201   is thrown.  */
4202
4203int
4204expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
4205{
4206  int ret = expand_decl_cleanup (decl, cleanup);
4207  if (cleanup && ret)
4208    {
4209      tree node = block_stack->data.block.cleanups;
4210      CLEANUP_EH_ONLY (node) = eh_only;
4211    }
4212  return ret;
4213}
4214
4215/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
4216   DECL_ELTS is the list of elements that belong to DECL's type.
4217   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */
4218
4219void
4220expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
4221{
4222  struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4223  rtx x;
4224  tree t;
4225
4226  /* If any of the elements are addressable, so is the entire union.  */
4227  for (t = decl_elts; t; t = TREE_CHAIN (t))
4228    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4229      {
4230	TREE_ADDRESSABLE (decl) = 1;
4231	break;
4232      }
4233
4234  expand_decl (decl);
4235  expand_decl_cleanup (decl, cleanup);
4236  x = DECL_RTL (decl);
4237
4238  /* Go through the elements, assigning RTL to each.  */
4239  for (t = decl_elts; t; t = TREE_CHAIN (t))
4240    {
4241      tree decl_elt = TREE_VALUE (t);
4242      tree cleanup_elt = TREE_PURPOSE (t);
4243      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4244
4245	      /* If any of the elements are used, so is the entire
4246	 union.  */
4247      if (TREE_USED (decl_elt))
4248	TREE_USED (decl) = 1;
4249
4250      /* Propagate the union's alignment to the elements.  */
4251      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4252      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4253
4254      /* If the element has BLKmode and the union doesn't, the union is
4255         aligned such that the element doesn't need to have BLKmode, so
4256         change the element's mode to the appropriate one for its size.  */
4257      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4258	DECL_MODE (decl_elt) = mode
4259	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4260
4261      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4262         instead create a new MEM rtx with the proper mode.  */
4263      if (GET_CODE (x) == MEM)
4264	{
4265	  if (mode == GET_MODE (x))
4266	    SET_DECL_RTL (decl_elt, x);
4267	  else
4268	    SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4269	}
4270      else if (GET_CODE (x) == REG)
4271	{
4272	  if (mode == GET_MODE (x))
4273	    SET_DECL_RTL (decl_elt, x);
4274	  else
4275	    SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4276	}
4277      else
4278	abort ();
4279
4280      /* Record the cleanup if there is one.  */
4281
4282      if (cleanup != 0)
4283	thisblock->data.block.cleanups
4284	  = tree_cons (decl_elt, cleanup_elt,
4285		       thisblock->data.block.cleanups);
4286    }
4287}
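
/* For example, for the anonymous union

     union { int i; float f; };

   I and F each receive RTL referring to the union's own storage,
   adjusted to their own modes (e.g. SImode and SFmode on a typical
   32-bit target), so both members alias the same bytes.  */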
4288
4289/* Expand a list of cleanups LIST.
4290   Elements may be expressions or may be nested lists.
4291
4292   If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
4293   goto and handle protection regions specially in that case.
4294
4295   If REACHABLE, we emit code, otherwise just inform the exception handling
4296   code about this finalization.  */
4297
4298static void
4299expand_cleanups (tree list, int in_fixup, int reachable)
4300{
4301  tree tail;
4302  for (tail = list; tail; tail = TREE_CHAIN (tail))
4303    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4304      expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
4305    else
4306      {
4307	if (! in_fixup && using_eh_for_cleanups_p)
4308	  expand_eh_region_end_cleanup (TREE_VALUE (tail));
4309
4310	if (reachable && !CLEANUP_EH_ONLY (tail))
4311	  {
4312	    /* Cleanups may be run multiple times.  For example,
4313	       when exiting a binding contour, we expand the
4314	       cleanups associated with that contour.  When a goto
4315	       within that binding contour has a target outside that
4316	       contour, it will expand all cleanups from its scope to
4317	       the target.  Though the cleanups are expanded multiple
4318	       times, the control paths are non-overlapping so the
4319	       cleanups will not be executed twice.  */
4320
4321	    /* We may need to protect from outer cleanups.  */
4322	    if (in_fixup && using_eh_for_cleanups_p)
4323	      {
4324		expand_eh_region_start ();
4325
4326		expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4327
4328		expand_eh_region_end_fixup (TREE_VALUE (tail));
4329	      }
4330	    else
4331	      expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4332
4333	    free_temp_slots ();
4334	  }
4335      }
4336}
4337
4338	/* Mark the context we are emitting RTL for as a conditional
4339	   context, so that any cleanup actions we register with
4340	   expand_decl_cleanup will be properly conditionalized when those
4341   cleanup actions are later performed.  Must be called before any
4342   expression (tree) is expanded that is within a conditional context.  */
4343
4344void
4345start_cleanup_deferral (void)
4346{
4347  /* block_stack can be NULL if we are inside the parameter list.  It is
4348     OK to do nothing, because cleanups aren't possible here.  */
4349  if (block_stack)
4350    ++block_stack->data.block.conditional_code;
4351}
4352
4353/* Mark the end of a conditional region of code.  Because cleanup
4354   deferrals may be nested, we may still be in a conditional region
4355	   after we end the currently deferred cleanups; only after we end all
4356	   deferred cleanups are we back in unconditional code.  */
4357
4358void
4359end_cleanup_deferral (void)
4360{
4361  /* block_stack can be NULL if we are inside the parameter list.  It is
4362     OK to do nothing, because cleanups aren't possible here.  */
4363  if (block_stack)
4364    --block_stack->data.block.conditional_code;
4365}
4366
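/* Return the list of cleanups registered in the innermost open
   binding contour, or 0 if no contour is open.  */
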
4367tree
4368last_cleanup_this_contour (void)
4369{
4370  if (block_stack == 0)
4371    return 0;
4372
4373  return block_stack->data.block.cleanups;
4374}
4375
4376/* Return 1 if there are any pending cleanups at this point.
4377   Check the current contour as well as contours that enclose
4378   the current contour.  */
4379
4380int
4381any_pending_cleanups (void)
4382{
4383  struct nesting *block;
4384
4385  if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4386    return 0;
4387
4388  if (block_stack->data.block.cleanups != NULL)
4389    return 1;
4390
4391  if (block_stack->data.block.outer_cleanups == 0)
4392    return 0;
4393
4394  for (block = block_stack->next; block; block = block->next)
4395    if (block->data.block.cleanups != 0)
4396      return 1;
4397
4398  return 0;
4399}
4400
4401/* Enter a case (Pascal) or switch (C) statement.
4402   Push a block onto case_stack and nesting_stack
4403   to accumulate the case-labels that are seen
4404   and to record the labels generated for the statement.
4405
4406   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4407   Otherwise, this construct is transparent for `exit_something'.
4408
4409   EXPR is the index-expression to be dispatched on.
4410   TYPE is its nominal type.  We could simply convert EXPR to this type,
4411   but instead we take short cuts.  */
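
/* For example, a front end expanding

     switch (e) { case 1: ...  case 4 ... 6: ...  default: ... }

   calls expand_start_case once, then pushcase or pushcase_range as
   each label is parsed, and finally expand_end_case to emit the
   dispatch code.  */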
4412
4413void
4414expand_start_case (int exit_flag, tree expr, tree type,
4415		   const char *printname)
4416{
4417  struct nesting *thiscase = ALLOC_NESTING ();
4418
4419  /* Make an entry on case_stack for the case we are entering.  */
4420
4421  thiscase->desc = CASE_NESTING;
4422  thiscase->next = case_stack;
4423  thiscase->all = nesting_stack;
4424  thiscase->depth = ++nesting_depth;
4425  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4426  thiscase->data.case_stmt.case_list = 0;
4427  thiscase->data.case_stmt.index_expr = expr;
4428  thiscase->data.case_stmt.nominal_type = type;
4429  thiscase->data.case_stmt.default_label = 0;
4430  thiscase->data.case_stmt.printname = printname;
4431  thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4432  case_stack = thiscase;
4433  nesting_stack = thiscase;
4434
4435  do_pending_stack_adjust ();
4436  emit_queue ();
4437
4438  /* Make sure case_stmt.start points to something that won't
4439     need any transformation before expand_end_case.  */
4440  if (GET_CODE (get_last_insn ()) != NOTE)
4441    emit_note (NOTE_INSN_DELETED);
4442
4443  thiscase->data.case_stmt.start = get_last_insn ();
4444
4445  start_cleanup_deferral ();
4446}
4447
4448/* Start a "dummy case statement" within which case labels are invalid
4449   and are not connected to any larger real case statement.
4450   This can be used if you don't want to let a case statement jump
4451   into the middle of certain kinds of constructs.  */
4452
4453void
4454expand_start_case_dummy (void)
4455{
4456  struct nesting *thiscase = ALLOC_NESTING ();
4457
4458  /* Make an entry on case_stack for the dummy.  */
4459
4460  thiscase->desc = CASE_NESTING;
4461  thiscase->next = case_stack;
4462  thiscase->all = nesting_stack;
4463  thiscase->depth = ++nesting_depth;
4464  thiscase->exit_label = 0;
4465  thiscase->data.case_stmt.case_list = 0;
4466  thiscase->data.case_stmt.start = 0;
4467  thiscase->data.case_stmt.nominal_type = 0;
4468  thiscase->data.case_stmt.default_label = 0;
4469  case_stack = thiscase;
4470  nesting_stack = thiscase;
4471  start_cleanup_deferral ();
4472}
4473
4474static void
4475check_seenlabel (void)
4476{
4477  /* If this is the first label, warn if any insns have been emitted.  */
4478  if (case_stack->data.case_stmt.line_number_status >= 0)
4479    {
4480      rtx insn;
4481
4482      restore_line_number_status
4483	(case_stack->data.case_stmt.line_number_status);
4484      case_stack->data.case_stmt.line_number_status = -1;
4485
4486      for (insn = case_stack->data.case_stmt.start;
4487	   insn;
4488	   insn = NEXT_INSN (insn))
4489	{
4490	  if (GET_CODE (insn) == CODE_LABEL)
4491	    break;
4492	  if (GET_CODE (insn) != NOTE
4493	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4494	    {
4495	      do
4496		insn = PREV_INSN (insn);
4497	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4498
4499	      /* If insn is zero, then there must have been a syntax error.  */
4500	      if (insn)
4501                {
4502                  location_t locus;
4503                  locus.file = NOTE_SOURCE_FILE (insn);
4504                  locus.line = NOTE_LINE_NUMBER (insn);
4505                  warning ("%Hunreachable code at beginning of %s", &locus,
4506                           case_stack->data.case_stmt.printname);
4507                }
4508	      break;
4509	    }
4510	}
4511    }
4512}
4513
4514/* Accumulate one case or default label inside a case or switch statement.
4515   VALUE is the value of the case (a null pointer, for a default label).
4516   The function CONVERTER, when applied to arguments T and V,
4517   converts the value V to the type T.
4518
4519   If not currently inside a case or switch statement, return 1 and do
4520   nothing.  The caller will print a language-specific error message.
4521   If VALUE is a duplicate or overlaps, return 2 and do nothing
4522   except store the (first) duplicate node in *DUPLICATE.
4523   If VALUE is out of range, return 3 and do nothing.
4524   If we are jumping into the scope of a cleanup or var-sized array, return 5.
4525   Return 0 on success.
4526
4527   Extended to handle range statements.  */
4528
4529int
4530pushcase (tree value, tree (*converter) (tree, tree), tree label,
4531	  tree *duplicate)
4532{
4533  tree index_type;
4534  tree nominal_type;
4535
4536  /* Fail if not inside a real case statement.  */
4537  if (! (case_stack && case_stack->data.case_stmt.start))
4538    return 1;
4539
4540  if (stack_block_stack
4541      && stack_block_stack->depth > case_stack->depth)
4542    return 5;
4543
4544  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4545  nominal_type = case_stack->data.case_stmt.nominal_type;
4546
4547  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
4548  if (index_type == error_mark_node)
4549    return 0;
4550
4551  /* Convert VALUE to the type in which the comparisons are nominally done.  */
4552  if (value != 0)
4553    value = (*converter) (nominal_type, value);
4554
4555  check_seenlabel ();
4556
4557  /* Fail if this value is out of range for the actual type of the index
4558     (which may be narrower than NOMINAL_TYPE).  */
4559  if (value != 0
4560      && (TREE_CONSTANT_OVERFLOW (value)
4561	  || ! int_fits_type_p (value, index_type)))
4562    return 3;
4563
4564  return add_case_node (value, value, label, duplicate);
4565}
4566
4567/* Like pushcase but this case applies to all values between VALUE1 and
4568   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
4569   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
4570   starts at VALUE1 and ends at the highest value of the index type.
4571   If both are NULL, this case applies to all values.
4572
4573   The return value is the same as that of pushcase but there is one
4574   additional error code: 4 means the specified range was empty.  */
4575
4576int
4577pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
4578		tree label, tree *duplicate)
4579{
4580  tree index_type;
4581  tree nominal_type;
4582
4583  /* Fail if not inside a real case statement.  */
4584  if (! (case_stack && case_stack->data.case_stmt.start))
4585    return 1;
4586
4587  if (stack_block_stack
4588      && stack_block_stack->depth > case_stack->depth)
4589    return 5;
4590
4591  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4592  nominal_type = case_stack->data.case_stmt.nominal_type;
4593
4594  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
4595  if (index_type == error_mark_node)
4596    return 0;
4597
4598  check_seenlabel ();
4599
4600	  /* Convert VALUEs to the type in which the comparisons are nominally
4601	     done and replace any unspecified value with the corresponding bound.  */
4602  if (value1 == 0)
4603    value1 = TYPE_MIN_VALUE (index_type);
4604  if (value2 == 0)
4605    value2 = TYPE_MAX_VALUE (index_type);
4606
4607  /* Fail if the range is empty.  Do this before any conversion since
4608     we want to allow out-of-range empty ranges.  */
4609  if (value2 != 0 && tree_int_cst_lt (value2, value1))
4610    return 4;
4611
4612  /* If the max was unbounded, use the max of the nominal_type we are
4613     converting to.  Do this after the < check above to suppress false
4614     positives.  */
4615  if (value2 == 0)
4616    value2 = TYPE_MAX_VALUE (nominal_type);
4617
4618  value1 = (*converter) (nominal_type, value1);
4619  value2 = (*converter) (nominal_type, value2);
4620
4621  /* Fail if these values are out of range.  */
4622  if (TREE_CONSTANT_OVERFLOW (value1)
4623      || ! int_fits_type_p (value1, index_type))
4624    return 3;
4625
4626  if (TREE_CONSTANT_OVERFLOW (value2)
4627      || ! int_fits_type_p (value2, index_type))
4628    return 3;
4629
4630  return add_case_node (value1, value2, label, duplicate);
4631}
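
/* For example, with a "char" index expression, "case 1 ... 1000000:"
   fails with 3 (value out of range), while the empty GNU range
   "case 4 ... 2:" fails with 4 before any conversion is attempted.  */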
4632
4633/* Do the actual insertion of a case label for pushcase and pushcase_range
4634   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
4635   slowdown for large switch statements.  */
4636
4637int
4638add_case_node (tree low, tree high, tree label, tree *duplicate)
4639{
4640  struct case_node *p, **q, *r;
4641
4642  /* If there's no HIGH value, then this is not a case range; it's
4643     just a simple case label.  But that's just a degenerate case
4644     range.  */
4645  if (!high)
4646    high = low;
4647
4648  /* Handle default labels specially.  */
4649  if (!high && !low)
4650    {
4651      if (case_stack->data.case_stmt.default_label != 0)
4652	{
4653	  *duplicate = case_stack->data.case_stmt.default_label;
4654	  return 2;
4655	}
4656      case_stack->data.case_stmt.default_label = label;
4657      expand_label (label);
4658      return 0;
4659    }
4660
4661  q = &case_stack->data.case_stmt.case_list;
4662  p = *q;
4663
4664  while ((r = *q))
4665    {
4666      p = r;
4667
4668      /* Keep going past elements distinctly greater than HIGH.  */
4669      if (tree_int_cst_lt (high, p->low))
4670	q = &p->left;
4671
4672      /* or distinctly less than LOW.  */
4673      else if (tree_int_cst_lt (p->high, low))
4674	q = &p->right;
4675
4676      else
4677	{
4678	  /* We have an overlap; this is an error.  */
4679	  *duplicate = p->code_label;
4680	  return 2;
4681	}
4682    }
4683
4684  /* Add this label to the chain, and succeed.  */
4685
4686  r = ggc_alloc (sizeof (struct case_node));
4687  r->low = low;
4688
4689  /* If the bounds are equal, turn this into the one-value case.  */
4690  if (tree_int_cst_equal (low, high))
4691    r->high = r->low;
4692  else
4693    r->high = high;
4694
4695  r->code_label = label;
4696  expand_label (label);
4697
4698  *q = r;
4699  r->parent = p;
4700  r->left = 0;
4701  r->right = 0;
4702  r->balance = 0;
4703
4704  while (p)
4705    {
4706      struct case_node *s;
4707
4708      if (r == p->left)
4709	{
4710	  int b;
4711
4712	  if (! (b = p->balance))
4713	    /* Growth propagation from left side.  */
4714	    p->balance = -1;
4715	  else if (b < 0)
4716	    {
4717	      if (r->balance < 0)
4718		{
4719		  /* R-Rotation */
4720		  if ((p->left = s = r->right))
4721		    s->parent = p;
4722
4723		  r->right = p;
4724		  p->balance = 0;
4725		  r->balance = 0;
4726		  s = p->parent;
4727		  p->parent = r;
4728
4729		  if ((r->parent = s))
4730		    {
4731		      if (s->left == p)
4732			s->left = r;
4733		      else
4734			s->right = r;
4735		    }
4736		  else
4737		    case_stack->data.case_stmt.case_list = r;
4738		}
4739	      else
4740		/* r->balance == +1 */
4741		{
4742		  /* LR-Rotation */
4743
4744		  int b2;
4745		  struct case_node *t = r->right;
4746
4747		  if ((p->left = s = t->right))
4748		    s->parent = p;
4749
4750		  t->right = p;
4751		  if ((r->right = s = t->left))
4752		    s->parent = r;
4753
4754		  t->left = r;
4755		  b = t->balance;
4756		  b2 = b < 0;
4757		  p->balance = b2;
4758		  b2 = -b2 - b;
4759		  r->balance = b2;
4760		  t->balance = 0;
4761		  s = p->parent;
4762		  p->parent = t;
4763		  r->parent = t;
4764
4765		  if ((t->parent = s))
4766		    {
4767		      if (s->left == p)
4768			s->left = t;
4769		      else
4770			s->right = t;
4771		    }
4772		  else
4773		    case_stack->data.case_stmt.case_list = t;
4774		}
4775	      break;
4776	    }
4777
4778	  else
4779	    {
4780	      /* p->balance == +1; growth of left side balances the node.  */
4781	      p->balance = 0;
4782	      break;
4783	    }
4784	}
4785      else
4786	/* r == p->right */
4787	{
4788	  int b;
4789
4790	  if (! (b = p->balance))
4791	    /* Growth propagation from right side.  */
4792	    p->balance++;
4793	  else if (b > 0)
4794	    {
4795	      if (r->balance > 0)
4796		{
4797		  /* L-Rotation */
4798
4799		  if ((p->right = s = r->left))
4800		    s->parent = p;
4801
4802		  r->left = p;
4803		  p->balance = 0;
4804		  r->balance = 0;
4805		  s = p->parent;
4806		  p->parent = r;
4807		  if ((r->parent = s))
4808		    {
4809		      if (s->left == p)
4810			s->left = r;
4811		      else
4812			s->right = r;
4813		    }
4814
4815		  else
4816		    case_stack->data.case_stmt.case_list = r;
4817		}
4818
4819	      else
4820		/* r->balance == -1 */
4821		{
4822		  /* RL-Rotation */
4823		  int b2;
4824		  struct case_node *t = r->left;
4825
4826		  if ((p->right = s = t->left))
4827		    s->parent = p;
4828
4829		  t->left = p;
4830
4831		  if ((r->left = s = t->right))
4832		    s->parent = r;
4833
4834		  t->right = r;
4835		  b = t->balance;
4836		  b2 = b < 0;
4837		  r->balance = b2;
4838		  b2 = -b2 - b;
4839		  p->balance = b2;
4840		  t->balance = 0;
4841		  s = p->parent;
4842		  p->parent = t;
4843		  r->parent = t;
4844
4845		  if ((t->parent = s))
4846		    {
4847		      if (s->left == p)
4848			s->left = t;
4849		      else
4850			s->right = t;
4851		    }
4852
4853		  else
4854		    case_stack->data.case_stmt.case_list = t;
4855		}
4856	      break;
4857	    }
4858	  else
4859	    {
4860	      /* p->balance == -1; growth of right side balances the node.  */
4861	      p->balance = 0;
4862	      break;
4863	    }
4864	}
4865
4866      r = p;
4867      p = p->parent;
4868    }
4869
4870  return 0;
4871}
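
/* For example, inserting the case values 1, 2 and 3 in that order
   makes the tree lean right twice; the third insertion triggers the
   L-rotation above, leaving 2 at the root with children 1 and 3 and
   every balance field zero.  */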
4872
4873/* Returns the number of possible values of TYPE.
4874   Returns -1 if the number is unknown, variable, or if the number does not
4875   fit in a HOST_WIDE_INT.
4876   Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4877   do not increase monotonically (there may be duplicates);
4878   to 1 if the values increase monotonically, but not always by 1;
4879   otherwise sets it to 0.  */
4880
4881HOST_WIDE_INT
4882all_cases_count (tree type, int *sparseness)
4883{
4884  tree t;
4885  HOST_WIDE_INT count, minval, lastval;
4886
4887  *sparseness = 0;
4888
4889  switch (TREE_CODE (type))
4890    {
4891    case BOOLEAN_TYPE:
4892      count = 2;
4893      break;
4894
4895    case CHAR_TYPE:
4896      count = 1 << BITS_PER_UNIT;
4897      break;
4898
4899    default:
4900    case INTEGER_TYPE:
4901      if (TYPE_MAX_VALUE (type) != 0
4902	  && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4903				    TYPE_MIN_VALUE (type))))
4904	  && 0 != (t = fold (build (PLUS_EXPR, type, t,
4905				    convert (type, integer_one_node))))
4906	  && host_integerp (t, 1))
4907	count = tree_low_cst (t, 1);
4908      else
4909	return -1;
4910      break;
4911
4912    case ENUMERAL_TYPE:
4913      /* Don't waste time with enumeral types with huge values.  */
4914      if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4915	  || TYPE_MAX_VALUE (type) == 0
4916	  || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4917	return -1;
4918
4919      lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4920      count = 0;
4921
4922      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4923	{
4924	  HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4925
4926	  if (*sparseness == 2 || thisval <= lastval)
4927	    *sparseness = 2;
4928	  else if (thisval != minval + count)
4929	    *sparseness = 1;
4930
4931	  lastval = thisval;
4932	  count++;
4933	}
4934    }
4935
4936  return count;
4937}
4938
4939#define BITARRAY_TEST(ARRAY, INDEX) \
4940  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4941			  & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4942#define BITARRAY_SET(ARRAY, INDEX) \
4943  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4944			  |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
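
/* For example, with 8-bit chars, BITARRAY_SET (cases_seen, 11) sets
   bit 3 of cases_seen[1], since 11 / 8 == 1 and 11 % 8 == 3.  */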
4945
4946/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
4947   with the case values we have seen, assuming the case expression
4948   has the given TYPE.
4949   SPARSENESS is as determined by all_cases_count.
4950
4951   The time needed is proportional to COUNT, unless
4952   SPARSENESS is 2, in which case quadratic time is needed.  */
4953
4954void
4955mark_seen_cases (tree type, unsigned char *cases_seen, HOST_WIDE_INT count,
4956		 int sparseness)
4957{
4958  tree next_node_to_try = NULL_TREE;
4959  HOST_WIDE_INT next_node_offset = 0;
4960
4961  struct case_node *n, *root = case_stack->data.case_stmt.case_list;
4962  tree val = make_node (INTEGER_CST);
4963
4964  TREE_TYPE (val) = type;
4965  if (! root)
4966    /* Do nothing.  */
4967    ;
4968  else if (sparseness == 2)
4969    {
4970      tree t;
4971      unsigned HOST_WIDE_INT xlo;
4972
4973      /* This less efficient loop is only needed to handle
4974	 duplicate case values (multiple enum constants
4975	 with the same value).  */
4976      TREE_TYPE (val) = TREE_TYPE (root->low);
4977      for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
4978	   t = TREE_CHAIN (t), xlo++)
4979	{
4980	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
4981	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
4982	  n = root;
4983	  do
4984	    {
4985	      /* Keep going past elements distinctly greater than VAL.  */
4986	      if (tree_int_cst_lt (val, n->low))
4987		n = n->left;
4988
4989	      /* or distinctly less than VAL.  */
4990	      else if (tree_int_cst_lt (n->high, val))
4991		n = n->right;
4992
4993	      else
4994		{
4995		  /* We have found a matching range.  */
4996		  BITARRAY_SET (cases_seen, xlo);
4997		  break;
4998		}
4999	    }
5000	  while (n);
5001	}
5002    }
5003  else
5004    {
5005      if (root->left)
5006	case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5007
5008      for (n = root; n; n = n->right)
5009	{
5010	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5011	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5012	  while (! tree_int_cst_lt (n->high, val))
5013	    {
5014	      /* Calculate (into xlo) the "offset" of the integer (val).
5015		 The element with lowest value has offset 0, the next smallest
5016		 element has offset 1, etc.  */
5017
5018	      unsigned HOST_WIDE_INT xlo;
5019	      HOST_WIDE_INT xhi;
5020	      tree t;
5021
5022	      if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5023		{
5024		  /* The TYPE_VALUES will be in increasing order, so
5025		     start searching where we last ended.  */
5026		  t = next_node_to_try;
5027		  xlo = next_node_offset;
5028		  xhi = 0;
5029		  for (;;)
5030		    {
5031		      if (t == NULL_TREE)
5032			{
5033			  t = TYPE_VALUES (type);
5034			  xlo = 0;
5035			}
5036		      if (tree_int_cst_equal (val, TREE_VALUE (t)))
5037			{
5038			  next_node_to_try = TREE_CHAIN (t);
5039			  next_node_offset = xlo + 1;
5040			  break;
5041			}
5042		      xlo++;
5043		      t = TREE_CHAIN (t);
5044		      if (t == next_node_to_try)
5045			{
5046			  xlo = -1;
5047			  break;
5048			}
5049		    }
5050		}
5051	      else
5052		{
5053		  t = TYPE_MIN_VALUE (type);
5054		  if (t)
5055		    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5056				&xlo, &xhi);
5057		  else
5058		    xlo = xhi = 0;
5059		  add_double (xlo, xhi,
5060			      TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5061			      &xlo, &xhi);
5062		}
5063
5064	      if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5065		BITARRAY_SET (cases_seen, xlo);
5066
5067	      add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5068			  1, 0,
5069			  &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5070	    }
5071	}
5072    }
5073}
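/* As an illustration of the offset computation above: for an index
   type whose minimum value is -2, the case value 1 is recorded at
   offset xlo = 1 - (-2) = 3 in CASES_SEEN.  */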
5074
5075/* Given a switch statement with an expression that is an enumeration
5076   type, warn if any of the enumeration type's literals are not
5077   covered by the case expressions of the switch.  Also, warn if there
5078   are any extra switch cases that are *not* elements of the
5079   enumerated type.
5080
5081   Historical note:
5082
5083   At one stage this function would: ``If all enumeration literals
5084   were covered by the case expressions, turn one of the expressions
5085   into the default expression since it should not be possible to fall
5086   through such a switch.''
5087
5088   That code has since been removed as: ``This optimization is
5089   disabled because it causes valid programs to fail.  ANSI C does not
5090   guarantee that an expression with enum type will have a value that
5091   is the same as one of the enumeration literals.''  */
5092
5093void
5094check_for_full_enumeration_handling (tree type)
5095{
5096  struct case_node *n;
5097  tree chain;
5098
5099  /* Sparseness of the selector values, as set by all_cases_count.  */
5100  int sparseness = 0;
5101
5102  /* The number of possible selector values.  */
5103  HOST_WIDE_INT size;
5104
5105  /* For each possible selector value, a one iff it has been matched
5106     by a case value alternative.  */
5107  unsigned char *cases_seen;
5108
5109  /* The allocated size of cases_seen, in chars.  */
5110  HOST_WIDE_INT bytes_needed;
5111
5112  size = all_cases_count (type, &sparseness);
5113  bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5114
5115  if (size > 0 && size < 600000
5116      /* We deliberately use calloc here, not xcalloc, so that we can suppress
5117	 this optimization if we don't have enough memory rather than
5118	 aborting, as xcalloc would do.  */
5119      && (cases_seen = really_call_calloc (bytes_needed, 1)) != NULL)
5120    {
5121      HOST_WIDE_INT i;
5122      tree v = TYPE_VALUES (type);
5123
5124      /* The time complexity of this code is normally O(N), where
5125	 N is the number of members in the enumerated type.
5126	 However, if type is an ENUMERAL_TYPE whose values do not
5127	 increase monotonically, O(N*log(N)) time may be needed.  */
5128
5129      mark_seen_cases (type, cases_seen, size, sparseness);
5130
5131      for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5132	if (BITARRAY_TEST (cases_seen, i) == 0)
5133	  warning ("enumeration value `%s' not handled in switch",
5134		   IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5135
5136      free (cases_seen);
5137    }
5138
5139  /* Now we go the other way around; we warn if there are case
5140     expressions that don't correspond to enumerators.  This can
5141     occur since C and C++ don't enforce type-checking of
5142     assignments to enumeration variables.  */
5143
5144  if (case_stack->data.case_stmt.case_list
5145      && case_stack->data.case_stmt.case_list->left)
5146    case_stack->data.case_stmt.case_list
5147      = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5148  for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5149    {
5150      for (chain = TYPE_VALUES (type);
5151	   chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5152	   chain = TREE_CHAIN (chain))
5153	;
5154
5155      if (!chain)
5156	{
5157	  if (TYPE_NAME (type) == 0)
5158	    warning ("case value `%ld' not in enumerated type",
5159		     (long) TREE_INT_CST_LOW (n->low));
5160	  else
5161	    warning ("case value `%ld' not in enumerated type `%s'",
5162		     (long) TREE_INT_CST_LOW (n->low),
5163		     IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5164					  == IDENTIFIER_NODE)
5165					 ? TYPE_NAME (type)
5166					 : DECL_NAME (TYPE_NAME (type))));
5167	}
5168      if (!tree_int_cst_equal (n->low, n->high))
5169	{
5170	  for (chain = TYPE_VALUES (type);
5171	       chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5172	       chain = TREE_CHAIN (chain))
5173	    ;
5174
5175	  if (!chain)
5176	    {
5177	      if (TYPE_NAME (type) == 0)
5178		warning ("case value `%ld' not in enumerated type",
5179			 (long) TREE_INT_CST_LOW (n->high));
5180	      else
5181		warning ("case value `%ld' not in enumerated type `%s'",
5182			 (long) TREE_INT_CST_LOW (n->high),
5183			 IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5184					      == IDENTIFIER_NODE)
5185					     ? TYPE_NAME (type)
5186					     : DECL_NAME (TYPE_NAME (type))));
5187	    }
5188	}
5189    }
5190}
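/* A hypothetical example showing both warnings above:

	enum color { RED, GREEN, BLUE };
	...
	switch (c) { case RED: ...; case 7: ...; }

   warns that GREEN and BLUE are not handled in the switch, and that
   the case value 7 is not in the enumerated type `color'.  */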
5191
5192
5193/* Maximum number of case bit tests.  */
5194#define MAX_CASE_BIT_TESTS  3
5195
5196/* By default, enable case bit tests on targets with ashlsi3.  */
5197#ifndef CASE_USE_BIT_TESTS
5198#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
5199			     != CODE_FOR_nothing)
5200#endif
5201
5202
5203/* A case_bit_test represents a set of case nodes that may be
5204   selected from using a bit-wise comparison.  HI and LO hold
5205   the integer to be tested against, LABEL contains the label
5206   to jump to upon success and BITS counts the number of case
5207   nodes handled by this test, typically the number of bits
5208   set in HI:LO.  */
5209
5210struct case_bit_test
5211{
5212  HOST_WIDE_INT hi;
5213  HOST_WIDE_INT lo;
5214  rtx label;
5215  int bits;
5216};
5217
5218/* Determine whether "1 << x" is relatively cheap in word_mode.  */
5219
5220static bool
5221lshift_cheap_p (void)
5222{
5223  static bool init = false;
5224  static bool cheap = true;
5225
5226  if (!init)
5227    {
5228      rtx reg = gen_rtx_REG (word_mode, 10000);
5229      int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
5230      cheap = cost < COSTS_N_INSNS (3);
5231      init = true;
5232    }
5233
5234  return cheap;
5235}
5236
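/* That is, a variable shift is considered cheap when its estimated
   rtx cost is below that of three typical instructions, as one would
   expect on, e.g., targets with a barrel shifter.  */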
5237/* Comparison function for qsort to order bit tests by decreasing
5238   number of case nodes, i.e. the node with the most cases gets
5239   tested first.  */
5240
5241static int
5242case_bit_test_cmp (const void *p1, const void *p2)
5243{
5244  const struct case_bit_test *d1 = p1;
5245  const struct case_bit_test *d2 = p2;
5246
5247  return d2->bits - d1->bits;
5248}
5249
5250/*  Expand a switch statement by a short sequence of bit-wise
5251    comparisons.  "switch(x)" is effectively converted into
5252    "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
5253    integer constants.
5254
5255    INDEX_EXPR is the value being switched on, which is of
5256    type INDEX_TYPE.  MINVAL is the lowest case value in
5257    the case nodes, of type INDEX_TYPE, and RANGE is the highest
5258    value minus MINVAL, also of type INDEX_TYPE.  NODES is
5259    the set of case nodes, and DEFAULT_LABEL is the label to
5260    branch to should none of the cases match.
5261
5262    There *MUST* be MAX_CASE_BIT_TESTS or fewer unique case
5263    node targets.  */
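/*  An illustrative sketch (hypothetical input): for

	switch (x)  with cases 4, 6, 9 and 10 all going to L1
		    and everything else going to L2,

    MINVAL is 4, RANGE is 6, and the emitted code behaves like

	if ((unsigned) (x - 4) > 6) goto L2;
	if ((1 << (x - 4)) & 0x65) goto L1;	-- bits 0, 2, 5, 6 set
	goto L2;  */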
5264
5265static void
5266emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
5267		     tree range, case_node_ptr nodes, rtx default_label)
5268{
5269  struct case_bit_test test[MAX_CASE_BIT_TESTS];
5270  enum machine_mode mode;
5271  rtx expr, index, label;
5272  unsigned int i, j, lo, hi;
5273  struct case_node *n;
5274  unsigned int count;
5275
5276  count = 0;
5277  for (n = nodes; n; n = n->right)
5278    {
5279      label = label_rtx (n->code_label);
5280      for (i = 0; i < count; i++)
5281	if (same_case_target_p (label, test[i].label))
5282	  break;
5283
5284      if (i == count)
5285	{
5286	  if (count >= MAX_CASE_BIT_TESTS)
5287	    abort ();
5288          test[i].hi = 0;
5289          test[i].lo = 0;
5290	  test[i].label = label;
5291	  test[i].bits = 1;
5292	  count++;
5293	}
5294      else
5295        test[i].bits++;
5296
5297      lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5298				      n->low, minval)), 1);
5299      hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
5300				      n->high, minval)), 1);
5301      for (j = lo; j <= hi; j++)
5302        if (j >= HOST_BITS_PER_WIDE_INT)
5303	  test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_WIDE_INT);
5304	else
5305	  test[i].lo |= (HOST_WIDE_INT) 1 << j;
5306    }
5307
5308  qsort (test, count, sizeof (*test), case_bit_test_cmp);
5309
5310  index_expr = fold (build (MINUS_EXPR, index_type,
5311			    convert (index_type, index_expr),
5312			    convert (index_type, minval)));
5313  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5314  emit_queue ();
5315  index = protect_from_queue (index, 0);
5316  do_pending_stack_adjust ();
5317
5318  mode = TYPE_MODE (index_type);
5319  expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
5320  emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
5321			   default_label);
5322
5323  index = convert_to_mode (word_mode, index, 0);
5324  index = expand_binop (word_mode, ashl_optab, const1_rtx,
5325			index, NULL_RTX, 1, OPTAB_WIDEN);
5326
5327  for (i = 0; i < count; i++)
5328    {
5329      expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
5330      expr = expand_binop (word_mode, and_optab, index, expr,
5331			   NULL_RTX, 1, OPTAB_WIDEN);
5332      emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
5333			       word_mode, 1, test[i].label);
5334    }
5335
5336  emit_jump (default_label);
5337}
5338
5339/* Terminate a case (Pascal) or switch (C) statement
5340   in which ORIG_INDEX is the expression to be tested.
5341   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5342   type as given in the source before any compiler conversions.
5343   Generate the code to test it and jump to the right place.  */
5344
5345void
5346expand_end_case_type (tree orig_index, tree orig_type)
5347{
5348  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5349  rtx default_label = 0;
5350  struct case_node *n, *m;
5351  unsigned int count, uniq;
5352  rtx index;
5353  rtx table_label;
5354  int ncases;
5355  rtx *labelvec;
5356  int i;
5357  rtx before_case, end, lab;
5358  struct nesting *thiscase = case_stack;
5359  tree index_expr, index_type;
5360  bool exit_done = false;
5361  int unsignedp;
5362
5363  /* Don't crash due to previous errors.  */
5364  if (thiscase == NULL)
5365    return;
5366
5367  index_expr = thiscase->data.case_stmt.index_expr;
5368  index_type = TREE_TYPE (index_expr);
5369  unsignedp = TREE_UNSIGNED (index_type);
5370  if (orig_type == NULL)
5371    orig_type = TREE_TYPE (orig_index);
5372
5373  do_pending_stack_adjust ();
5374
5375  /* This might get a spurious warning in the presence of a syntax error;
5376     it could be fixed by moving the call to check_seenlabel after the
5377     check for error_mark_node, and copying the code of check_seenlabel that
5378     deals with case_stack->data.case_stmt.line_number_status /
5379     restore_line_number_status in front of the call to end_cleanup_deferral.
5380     However, this might miss some useful warnings in the presence of
5381     non-syntax errors.  */
5382  check_seenlabel ();
5383
5384  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
5385  if (index_type != error_mark_node)
5386    {
5387      /* If the switch expression was an enumerated type, check that
5388	 all enumeration literals are covered by the cases.
5389	 The check is made when -Wswitch was specified and there is no
5390	 default case, or when -Wswitch-enum was specified.  */
5391      if (((warn_switch && !thiscase->data.case_stmt.default_label)
5392	   || warn_switch_enum)
5393	  && TREE_CODE (orig_type) == ENUMERAL_TYPE
5394	  && TREE_CODE (index_expr) != INTEGER_CST)
5395	check_for_full_enumeration_handling (orig_type);
5396
5397      if (warn_switch_default && !thiscase->data.case_stmt.default_label)
5398	warning ("switch missing default case");
5399
5400      /* If we don't have a default-label, create one here,
5401	 after the body of the switch.  */
5402      if (thiscase->data.case_stmt.default_label == 0)
5403	{
5404	  thiscase->data.case_stmt.default_label
5405	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5406	  /* Share the exit label if possible.  */
5407          if (thiscase->exit_label)
5408	    {
5409	      SET_DECL_RTL (thiscase->data.case_stmt.default_label,
5410			    thiscase->exit_label);
5411	      exit_done = true;
5412	    }
5413	  expand_label (thiscase->data.case_stmt.default_label);
5414	}
5415      default_label = label_rtx (thiscase->data.case_stmt.default_label);
5416
5417      before_case = get_last_insn ();
5418
5419      if (thiscase->data.case_stmt.case_list
5420	  && thiscase->data.case_stmt.case_list->left)
5421	thiscase->data.case_stmt.case_list
5422	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5423
5424      /* Simplify the case-list before we count it.  */
5425      group_case_nodes (thiscase->data.case_stmt.case_list);
5426      strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
5427				default_label);
5428
5429      /* Get upper and lower bounds of case values.
5430	 Also convert all the case values to the index expr's data type.  */
5431
5432      uniq = 0;
5433      count = 0;
5434      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5435	{
5436	  /* Check low and high label values are integers.  */
5437	  if (TREE_CODE (n->low) != INTEGER_CST)
5438	    abort ();
5439	  if (TREE_CODE (n->high) != INTEGER_CST)
5440	    abort ();
5441
5442	  n->low = convert (index_type, n->low);
5443	  n->high = convert (index_type, n->high);
5444
5445	  /* Count the elements and track the largest and smallest
5446	     of them (treating them as signed even if they are not).  */
5447	  if (count++ == 0)
5448	    {
5449	      minval = n->low;
5450	      maxval = n->high;
5451	    }
5452	  else
5453	    {
5454	      if (INT_CST_LT (n->low, minval))
5455		minval = n->low;
5456	      if (INT_CST_LT (maxval, n->high))
5457		maxval = n->high;
5458	    }
5459	  /* A range counts double, since it requires two compares.  */
5460	  if (! tree_int_cst_equal (n->low, n->high))
5461	    count++;
5462
5463	  /* Count the number of unique case node targets.  */
5464          uniq++;
5465	  lab = label_rtx (n->code_label);
5466          for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
5467            if (same_case_target_p (label_rtx (m->code_label), lab))
5468              {
5469                uniq--;
5470                break;
5471              }
5472	}
5473
5474      /* Compute span of values.  */
5475      if (count != 0)
5476	range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5477
5478      end_cleanup_deferral ();
5479
5480      if (count == 0)
5481	{
5482	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5483	  emit_queue ();
5484	  emit_jump (default_label);
5485	}
5486
5487      /* Try implementing this switch statement by a short sequence of
5488	 bit-wise comparisons.  However, we let the binary-tree case
5489	 below handle constant index expressions.  */
5490      else if (CASE_USE_BIT_TESTS
5491	       && ! TREE_CONSTANT (index_expr)
5492	       && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
5493	       && compare_tree_int (range, 0) > 0
5494	       && lshift_cheap_p ()
5495	       && ((uniq == 1 && count >= 3)
5496		   || (uniq == 2 && count >= 5)
5497		   || (uniq == 3 && count >= 6)))
5498	{
5499	  /* If all the case values fit in a word even without
5500	     subtracting MINVAL, we can omit the subtraction
5501	     altogether.  */
5502	  if (compare_tree_int (minval, 0) > 0
5503	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
5504	    {
5505	      minval = integer_zero_node;
5506	      range = maxval;
5507	    }
5508	  emit_case_bit_tests (index_type, index_expr, minval, range,
5509			       thiscase->data.case_stmt.case_list,
5510			       default_label);
5511	}
5512
5513      /* If the range of values is much bigger than the number of values,
5514	 make a sequence of conditional branches instead of a dispatch.
5515	 If the switch-index is a constant, do it this way
5516	 because we can optimize it.  */
5517
5518      else if (count < case_values_threshold ()
5519	       || compare_tree_int (range,
5520				    (optimize_size ? 3 : 10) * count) > 0
5521	       /* RANGE may be signed, and really large ranges will show up
5522		  as negative numbers.  */
5523	       || compare_tree_int (range, 0) < 0
5524#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5525	       || flag_pic
5526#endif
5527	       || TREE_CONSTANT (index_expr))
5528	{
5529	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5530
5531	  /* If the index is a short or char for which we do not have
5532	     an insn to handle comparisons directly, convert it to
5533	     a full integer now, rather than letting each comparison
5534	     generate the conversion.  */
5535
5536	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5537	      && ! have_insn_for (COMPARE, GET_MODE (index)))
5538	    {
5539	      enum machine_mode wider_mode;
5540	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5541		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5542		if (have_insn_for (COMPARE, wider_mode))
5543		  {
5544		    index = convert_to_mode (wider_mode, index, unsignedp);
5545		    break;
5546		  }
5547	    }
5548
5549	  emit_queue ();
5550	  do_pending_stack_adjust ();
5551
5552	  index = protect_from_queue (index, 0);
5553	  if (GET_CODE (index) == MEM)
5554	    index = copy_to_reg (index);
5555	  if (GET_CODE (index) == CONST_INT
5556	      || TREE_CODE (index_expr) == INTEGER_CST)
5557	    {
5558	      /* Make a tree node with the proper constant value
5559		 if we don't already have one.  */
5560	      if (TREE_CODE (index_expr) != INTEGER_CST)
5561		{
5562		  index_expr
5563		    = build_int_2 (INTVAL (index),
5564				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5565		  index_expr = convert (index_type, index_expr);
5566		}
5567
5568	      /* For constant index expressions we need only
5569		 issue an unconditional branch to the appropriate
5570		 target code.  The job of removing any unreachable
5571		 code is left to the optimization phase if the
5572		 "-O" option is specified.  */
5573	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5574		if (! tree_int_cst_lt (index_expr, n->low)
5575		    && ! tree_int_cst_lt (n->high, index_expr))
5576		  break;
5577
5578	      if (n)
5579		emit_jump (label_rtx (n->code_label));
5580	      else
5581		emit_jump (default_label);
5582	    }
5583	  else
5584	    {
5585	      /* If the index expression is not constant we generate
5586		 a binary decision tree to select the appropriate
5587		 target code.  This is done as follows:
5588
5589		 The list of cases is rearranged into a binary tree,
5590		 nearly optimal assuming equal probability for each case.
5591
5592		 The tree is transformed into RTL, eliminating
5593		 redundant test conditions at the same time.
5594
5595		 If program flow could reach the end of the
5596		 decision tree an unconditional jump to the
5597		 default code is emitted.  */
5598
5599	      use_cost_table
5600		= (TREE_CODE (orig_type) != ENUMERAL_TYPE
5601		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
5602	      balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5603	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5604			       default_label, index_type);
5605	      emit_jump_if_reachable (default_label);
5606	    }
5607	}
5608      else
5609	{
5610	  table_label = gen_label_rtx ();
5611	  if (! try_casesi (index_type, index_expr, minval, range,
5612			    table_label, default_label))
5613	    {
5614	      index_type = thiscase->data.case_stmt.nominal_type;
5615
5616	      /* Index jumptables from zero for suitable values of
5617                 minval to avoid a subtraction.  */
5618	      if (! optimize_size
5619		  && compare_tree_int (minval, 0) > 0
5620		  && compare_tree_int (minval, 3) < 0)
5621		{
5622		  minval = integer_zero_node;
5623		  range = maxval;
5624		}
5625
5626	      if (! try_tablejump (index_type, index_expr, minval, range,
5627				   table_label, default_label))
5628		abort ();
5629	    }
5630
5631	  /* Get table of labels to jump to, in order of case index.  */
5632
5633	  ncases = tree_low_cst (range, 0) + 1;
5634	  labelvec = alloca (ncases * sizeof (rtx));
5635	  memset (labelvec, 0, ncases * sizeof (rtx));
5636
5637	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5638	    {
5639	      /* Compute the low and high bounds relative to the minimum
5640		 value since that should fit in a HOST_WIDE_INT while the
5641		 actual values may not.  */
5642	      HOST_WIDE_INT i_low
5643		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
5644					     n->low, minval)), 1);
5645	      HOST_WIDE_INT i_high
5646		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
5647					     n->high, minval)), 1);
5648	      HOST_WIDE_INT i;
5649
5650	      for (i = i_low; i <= i_high; i++)
5651		labelvec[i]
5652		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5653	    }
5654
5655	  /* Fill in the gaps with the default.  */
5656	  for (i = 0; i < ncases; i++)
5657	    if (labelvec[i] == 0)
5658	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5659
5660	  /* Output the table.  */
5661	  emit_label (table_label);
5662
5663	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5664	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5665						   gen_rtx_LABEL_REF (Pmode, table_label),
5666						   gen_rtvec_v (ncases, labelvec),
5667						   const0_rtx, const0_rtx));
5668	  else
5669	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5670					      gen_rtvec_v (ncases, labelvec)));
5671
5672	  /* If the case insn drops through the table,
5673	     after the table we must jump to the default-label.
5674	     Otherwise record no drop-through after the table.  */
5675#ifdef CASE_DROPS_THROUGH
5676	  emit_jump (default_label);
5677#else
5678	  emit_barrier ();
5679#endif
5680	}
5681
5682      before_case = NEXT_INSN (before_case);
5683      end = get_last_insn ();
5684      if (squeeze_notes (&before_case, &end))
5685	abort ();
5686      reorder_insns (before_case, end,
5687		     thiscase->data.case_stmt.start);
5688    }
5689  else
5690    end_cleanup_deferral ();
5691
5692  if (thiscase->exit_label && !exit_done)
5693    emit_label (thiscase->exit_label);
5694
5695  POPSTACK (case_stack);
5696
5697  free_temp_slots ();
5698}
5699
5700/* Convert the tree NODE into a list linked by the right field, with the left
5701   field zeroed.  RIGHT is used for recursion; it is a list to be placed
5702   rightmost in the resulting list.  */
5703
5704static struct case_node *
5705case_tree2list (struct case_node *node, struct case_node *right)
5706{
5707  struct case_node *left;
5708
5709  if (node->right)
5710    right = case_tree2list (node->right, right);
5711
5712  node->right = right;
5713  if ((left = node->left))
5714    {
5715      node->left = 0;
5716      return case_tree2list (left, node);
5717    }
5718
5719  return node;
5720}
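/* For example, a three-node tree with 2 at the root and 1 and 3 as
   its left and right children becomes the list 1 -> 2 -> 3, linked
   through the RIGHT fields with every LEFT field zeroed.  */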
5721
5722/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */
5723
5724static void
5725do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
5726{
5727  if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5728    {
5729      if (op1 == op2)
5730	emit_jump (label);
5731    }
5732  else
5733    emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5734			     (GET_MODE (op1) == VOIDmode
5735			     ? GET_MODE (op2) : GET_MODE (op1)),
5736			     unsignedp, label);
5737}
5738
5739/* Not all case values are encountered equally often.  This function
5740   uses a heuristic to weight case labels, in cases where that
5741   looks like a reasonable thing to do.
5742
5743   Right now, all we try to guess is text, and we establish the
5744   following weights:
5745
5746	chars above space:	16
5747	digits:			16
5748	default:		12
5749	space, punct:		8
5750	tab:			4
5751	newline:		2
5752	other "\" chars:	1
5753	remaining chars:	0
5754
5755   If we find any cases in the switch that are not either -1 or in the range
5756   of valid ASCII characters, or are control characters other than those
5757   commonly used with "\", don't treat this switch as scanning text.
5758
5759   Return 1 if these nodes are suitable for cost estimation, otherwise
5760   return 0.  */
5761
5762static int
5763estimate_case_costs (case_node_ptr node)
5764{
5765  tree min_ascii = integer_minus_one_node;
5766  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5767  case_node_ptr n;
5768  int i;
5769
5770  /* If we haven't already made the cost table, make it now.  Note that the
5771     lower bound of the table is -1, not zero.  */
5772
5773  if (! cost_table_initialized)
5774    {
5775      cost_table_initialized = 1;
5776
5777      for (i = 0; i < 128; i++)
5778	{
5779	  if (ISALNUM (i))
5780	    COST_TABLE (i) = 16;
5781	  else if (ISPUNCT (i))
5782	    COST_TABLE (i) = 8;
5783	  else if (ISCNTRL (i))
5784	    COST_TABLE (i) = -1;
5785	}
5786
5787      COST_TABLE (' ') = 8;
5788      COST_TABLE ('\t') = 4;
5789      COST_TABLE ('\0') = 4;
5790      COST_TABLE ('\n') = 2;
5791      COST_TABLE ('\f') = 1;
5792      COST_TABLE ('\v') = 1;
5793      COST_TABLE ('\b') = 1;
5794    }
5795
5796  /* See if all the case expressions look like text.  It is text if the
5797     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
5798     as signed arithmetic since we don't want to ever access cost_table with a
5799     value less than -1.  Also check that none of the constants in a range
5800     are strange control characters.  */
5801
5802  for (n = node; n; n = n->right)
5803    {
5804      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5805	return 0;
5806
5807      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5808	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5809	if (COST_TABLE (i) < 0)
5810	  return 0;
5811    }
5812
5813  /* All interesting values are within the range of interesting
5814     ASCII characters.  */
5815  return 1;
5816}
5817
5818/* Determine whether two case labels branch to the same target.  */
5819
5820static bool
5821same_case_target_p (rtx l1, rtx l2)
5822{
5823  rtx i1, i2;
5824
5825  if (l1 == l2)
5826    return true;
5827
5828  i1 = next_real_insn (l1);
5829  i2 = next_real_insn (l2);
5830  if (i1 == i2)
5831    return true;
5832
5833  if (i1 && simplejump_p (i1))
5834    {
5835      l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
5836    }
5837
5838  if (i2 && simplejump_p (i2))
5839    {
5840      l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
5841    }
5842  return l1 == l2;
5843}
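/* So two case labels are treated as the same target not only when
   they are literally equal, but also when one of them leads, via at
   most one simple unconditional jump, to the same code as the other.  */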
5844
5845/* Delete nodes that branch to the default label from a list of
5846   case nodes.  Eg. case 5: default: becomes just default:  */
5847
5848static void
5849strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
5850{
5851  case_node_ptr ptr;
5852
5853  while (*prev)
5854    {
5855      ptr = *prev;
5856      if (same_case_target_p (label_rtx (ptr->code_label), deflab))
5857	*prev = ptr->right;
5858      else
5859	prev = &ptr->right;
5860    }
5861}
5862
5863/* Scan an ordered list of case nodes
5864   combining those with consecutive values or ranges.
5865
5866   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
5867
5868static void
5869group_case_nodes (case_node_ptr head)
5870{
5871  case_node_ptr node = head;
5872
5873  while (node)
5874    {
5875      rtx lab = label_rtx (node->code_label);
5876      case_node_ptr np = node;
5877
5878      /* Try to group the successors of NODE with NODE.  */
5879      while (((np = np->right) != 0)
5880	     /* Do they jump to the same place?  */
5881	     && same_case_target_p (label_rtx (np->code_label), lab)
5882	     /* Are their ranges consecutive?  */
5883	     && tree_int_cst_equal (np->low,
5884				    fold (build (PLUS_EXPR,
5885						 TREE_TYPE (node->high),
5886						 node->high,
5887						 integer_one_node)))
5888	     /* An overflow is not consecutive.  */
5889	     && tree_int_cst_lt (node->high,
5890				 fold (build (PLUS_EXPR,
5891					      TREE_TYPE (node->high),
5892					      node->high,
5893					      integer_one_node))))
5894	{
5895	  node->high = np->high;
5896	}
5897      /* NP is the first node after NODE which can't be grouped with it.
5898	 Delete the nodes in between, and move on to that node.  */
5899      node->right = np;
5900      node = np;
5901    }
5902}
5903
5904/* Take an ordered list of case nodes
5905   and transform it into a near-optimal binary tree,
5906   on the assumption that any target code selection value is as
5907   likely as any other.
5908
5909   The transformation is performed by splitting the ordered
5910   list into two equal sections plus a pivot.  The parts are
5911   then attached to the pivot as left and right branches.  Each
5912   branch is then transformed recursively.  */
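/* For example, an ordered list of the seven values 1 .. 7 (with no
   cost table) is split at 4, leaving 1 .. 3 as the left branch and
   5 .. 7 as the right branch, each of which is then split at its own
   middle node in the same fashion.  */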
5913
5914static void
5915balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
5916{
5917  case_node_ptr np;
5918
5919  np = *head;
5920  if (np)
5921    {
5922      int cost = 0;
5923      int i = 0;
5924      int ranges = 0;
5925      case_node_ptr *npp;
5926      case_node_ptr left;
5927
5928      /* Count the number of entries on this branch.  Also count the ranges.  */
5929
5930      while (np)
5931	{
5932	  if (!tree_int_cst_equal (np->low, np->high))
5933	    {
5934	      ranges++;
5935	      if (use_cost_table)
5936		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5937	    }
5938
5939	  if (use_cost_table)
5940	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5941
5942	  i++;
5943	  np = np->right;
5944	}
5945
5946      if (i > 2)
5947	{
5948	  /* Split this list if it is long enough for that to help.  */
5949	  npp = head;
5950	  left = *npp;
5951	  if (use_cost_table)
5952	    {
5953	      /* Find the place in the list that bisects the list's total cost.
5954		 Here I gets half the total cost.  */
5955	      int n_moved = 0;
5956	      i = (cost + 1) / 2;
5957	      while (1)
5958		{
5959		  /* Skip nodes while their cost does not reach that amount.  */
5960		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5961		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5962		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5963		  if (i <= 0)
5964		    break;
5965		  npp = &(*npp)->right;
5966		  n_moved += 1;
5967		}
5968	      if (n_moved == 0)
5969		{
5970		  /* Leave this branch lopsided, but optimize left-hand
5971		     side and fill in `parent' fields for right-hand side.  */
5972		  np = *head;
5973		  np->parent = parent;
5974		  balance_case_nodes (&np->left, np);
5975		  for (; np->right; np = np->right)
5976		    np->right->parent = np;
5977		  return;
5978		}
5979	    }
5980	  /* If there are just three nodes, split at the middle one.  */
5981	  else if (i == 3)
5982	    npp = &(*npp)->right;
5983	  else
5984	    {
5985	      /* Find the place in the list that bisects the list's total cost,
5986		 where ranges count as 2.
5987		 Here I gets half the total cost.  */
5988	      i = (i + ranges + 1) / 2;
5989	      while (1)
5990		{
5991		  /* Skip nodes while their cost does not reach that amount.  */
5992		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5993		    i--;
5994		  i--;
5995		  if (i <= 0)
5996		    break;
5997		  npp = &(*npp)->right;
5998		}
5999	    }
6000	  *head = np = *npp;
6001	  *npp = 0;
6002	  np->parent = parent;
6003	  np->left = left;
6004
6005	  /* Optimize each of the two split parts.  */
6006	  balance_case_nodes (&np->left, np);
6007	  balance_case_nodes (&np->right, np);
6008	}
6009      else
6010	{
6011	  /* Else leave this branch as one level,
6012	     but fill in `parent' fields.  */
6013	  np = *head;
6014	  np->parent = parent;
6015	  for (; np->right; np = np->right)
6016	    np->right->parent = np;
6017	}
6018    }
6019}
6020
6021/* Search the parent sections of the case node tree
6022   to see if a test for the lower bound of NODE would be redundant.
6023   INDEX_TYPE is the type of the index expression.
6024
6025   The instructions to generate the case decision tree are
6026   output in the same order as nodes are processed so it is
6027   known that if a parent node checks the range of the current
6028   node minus one, then the current node is bounded at its lower
6029   span.  Thus the test would be redundant.  */
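/* For example, if this node covers the range 5 .. 8 and some parent
   already branched elsewhere for values up through 4 (its HIGH being
   4), control reaches this node only with INDEX >= 5 and the lower
   bound test can be omitted.  */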
6030
6031static int
6032node_has_low_bound (case_node_ptr node, tree index_type)
6033{
6034  tree low_minus_one;
6035  case_node_ptr pnode;
6036
6037  /* If the lower bound of this node is the lowest value in the index type,
6038     we need not test it.  */
6039
6040  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
6041    return 1;
6042
6043  /* If this node has a left branch, the value at the left must be less
6044     than that at this node, so it cannot be bounded at the bottom and
6045     we need not bother testing any further.  */
6046
6047  if (node->left)
6048    return 0;
6049
6050  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
6051			       node->low, integer_one_node));
6052
6053  /* If the subtraction above overflowed, we can't verify anything.
6054     Otherwise, look for a parent that tests our value - 1.  */
6055
6056  if (! tree_int_cst_lt (low_minus_one, node->low))
6057    return 0;
6058
6059  for (pnode = node->parent; pnode; pnode = pnode->parent)
6060    if (tree_int_cst_equal (low_minus_one, pnode->high))
6061      return 1;
6062
6063  return 0;
6064}
6065
6066/* Search the parent sections of the case node tree
6067   to see if a test for the upper bound of NODE would be redundant.
6068   INDEX_TYPE is the type of the index expression.
6069
6070   The instructions to generate the case decision tree are
6071   output in the same order as nodes are processed so it is
6072   known that if a parent node checks the range of the current
6073   node plus one, then the current node is bounded at its upper
6074   span.  Thus the test would be redundant.  */
6075
6076static int
6077node_has_high_bound (case_node_ptr node, tree index_type)
6078{
6079  tree high_plus_one;
6080  case_node_ptr pnode;
6081
6082  /* If there is no upper bound, obviously no test is needed.  */
6083
6084  if (TYPE_MAX_VALUE (index_type) == NULL)
6085    return 1;
6086
6087  /* If the upper bound of this node is the highest value in the type
6088     of the index expression, we need not test against it.  */
6089
6090  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
6091    return 1;
6092
6093  /* If this node has a right branch, the value at the right must be greater
6094     than that at this node, so it cannot be bounded at the top and
6095     we need not bother testing any further.  */
6096
6097  if (node->right)
6098    return 0;
6099
6100  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
6101			       node->high, integer_one_node));
6102
6103  /* If the addition above overflowed, we can't verify anything.
6104     Otherwise, look for a parent that tests our value + 1.  */
6105
6106  if (! tree_int_cst_lt (node->high, high_plus_one))
6107    return 0;
6108
6109  for (pnode = node->parent; pnode; pnode = pnode->parent)
6110    if (tree_int_cst_equal (high_plus_one, pnode->low))
6111      return 1;
6112
6113  return 0;
6114}
6115
6116/* Search the parent sections of the
6117   case node tree to see if both tests for the upper and lower
6118   bounds of NODE would be redundant.  */
6119
6120static int
6121node_is_bounded (case_node_ptr node, tree index_type)
6122{
6123  return (node_has_low_bound (node, index_type)
6124	  && node_has_high_bound (node, index_type));
6125}
6126
6127/*  Emit an unconditional jump to LABEL unless it would be dead code.  */
6128
6129static void
6130emit_jump_if_reachable (rtx label)
6131{
6132  if (GET_CODE (get_last_insn ()) != BARRIER)
6133    emit_jump (label);
6134}
6135
6136/* Emit step-by-step code to select a case for the value of INDEX.
6137   The thus generated decision tree follows the form of the
6138   case-node binary tree NODE, whose nodes represent test conditions.
6139   INDEX_TYPE is the type of the index of the switch.
6140
6141   Care is taken to prune redundant tests from the decision tree
6142   by detecting any boundary conditions already checked by
6143   emitted rtx.  (See node_has_high_bound, node_has_low_bound
6144   and node_is_bounded, above.)
6145
6146   Where the test conditions can be shown to be redundant we emit
6147   an unconditional jump to the target code.  As a further
6148   optimization, the subordinates of a tree node are examined to
6149   check for bounded nodes.  In this case conditional and/or
6150   check for bounded nodes.  In this case, conditional and/or
6151   unconditional jumps resulting from the boundary check for the
6152   current node are arranged to target the subordinate's associated
6153   code for out-of-bound conditions on the current node.
6154   We can assume that when control reaches the code generated here,
6155   the index value has already been compared with the parents
6156   of this node, and determined to be on the same side of each parent
6157   as this node is.  Thus, if this node tests for the value 51,
6158   and a parent tested for 52, we don't need to consider
6159   the possibility of a value greater than 51.  If another parent
6160   tests for the value 50, then this node need not test anything.  */
6161
6162static void
6163emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
6164		 tree index_type)
6165{
6166  /* If INDEX has an unsigned type, we must make unsigned branches.  */
6167  int unsignedp = TREE_UNSIGNED (index_type);
6168  enum machine_mode mode = GET_MODE (index);
6169  enum machine_mode imode = TYPE_MODE (index_type);
6170
6171  /* See if our parents have already tested everything for us.
6172     If they have, emit an unconditional jump for this node.  */
6173  if (node_is_bounded (node, index_type))
6174    emit_jump (label_rtx (node->code_label));
6175
6176  else if (tree_int_cst_equal (node->low, node->high))
6177    {
6178      /* Node is single valued.  First see if the index expression matches
6179	 this node and then check our children, if any.  */
6180
6181      do_jump_if_equal (index,
6182			convert_modes (mode, imode,
6183				       expand_expr (node->low, NULL_RTX,
6184						    VOIDmode, 0),
6185				       unsignedp),
6186			label_rtx (node->code_label), unsignedp);
6187
6188      if (node->right != 0 && node->left != 0)
6189	{
6190	  /* This node has children on both sides.
6191	     Dispatch to one side or the other
6192	     by comparing the index value with this node's value.
6193	     If one subtree is bounded, check that one first,
6194	     so we can avoid real branches in the tree.  */
6195
6196	  if (node_is_bounded (node->right, index_type))
6197	    {
6198	      emit_cmp_and_jump_insns (index,
6199				       convert_modes
6200				       (mode, imode,
6201					expand_expr (node->high, NULL_RTX,
6202						     VOIDmode, 0),
6203					unsignedp),
6204				       GT, NULL_RTX, mode, unsignedp,
6205				       label_rtx (node->right->code_label));
6206	      emit_case_nodes (index, node->left, default_label, index_type);
6207	    }
6208
6209	  else if (node_is_bounded (node->left, index_type))
6210	    {
6211	      emit_cmp_and_jump_insns (index,
6212				       convert_modes
6213				       (mode, imode,
6214					expand_expr (node->high, NULL_RTX,
6215						     VOIDmode, 0),
6216					unsignedp),
6217				       LT, NULL_RTX, mode, unsignedp,
6218				       label_rtx (node->left->code_label));
6219	      emit_case_nodes (index, node->right, default_label, index_type);
6220	    }
6221
6222	  else
6223	    {
6224	      /* Neither node is bounded.  First distinguish the two sides;
6225		 then emit the code for one side at a time.  */
6226
6227	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6228
6229	      /* See if the value is on the right.  */
6230	      emit_cmp_and_jump_insns (index,
6231				       convert_modes
6232				       (mode, imode,
6233					expand_expr (node->high, NULL_RTX,
6234						     VOIDmode, 0),
6235					unsignedp),
6236				       GT, NULL_RTX, mode, unsignedp,
6237				       label_rtx (test_label));
6238
6239	      /* Value must be on the left.
6240		 Handle the left-hand subtree.  */
6241	      emit_case_nodes (index, node->left, default_label, index_type);
6242	      /* If left-hand subtree does nothing,
6243		 go to default.  */
6244	      emit_jump_if_reachable (default_label);
6245
6246	      /* Code branches here for the right-hand subtree.  */
6247	      expand_label (test_label);
6248	      emit_case_nodes (index, node->right, default_label, index_type);
6249	    }
6250	}
6251
6252      else if (node->right != 0 && node->left == 0)
6253	{
6254	  /* Here we have a right child but no left so we issue conditional
6255	  /* Here we have a right child but no left child, so we issue a
6256	     conditional branch to default and process the right child.
6257
6258	     Omit the conditional branch to default if it would avoid only one
6259	     right child; it costs too much space to save so little time.  */
6260	  if (node->right->right || node->right->left
6261	      || !tree_int_cst_equal (node->right->low, node->right->high))
6262	    {
6263	      if (!node_has_low_bound (node, index_type))
6264		{
6265		  emit_cmp_and_jump_insns (index,
6266					   convert_modes
6267					   (mode, imode,
6268					    expand_expr (node->high, NULL_RTX,
6269							 VOIDmode, 0),
6270					    unsignedp),
6271					   LT, NULL_RTX, mode, unsignedp,
6272					   default_label);
6273		}
6274
6275	      emit_case_nodes (index, node->right, default_label, index_type);
6276	    }
6277	  else
6278	    /* We cannot process node->right normally
6279	       since we haven't ruled out the numbers less than
6280	       this node's value.  So handle node->right explicitly.  */
6281	    do_jump_if_equal (index,
6282			      convert_modes
6283			      (mode, imode,
6284			       expand_expr (node->right->low, NULL_RTX,
6285					    VOIDmode, 0),
6286			       unsignedp),
6287			      label_rtx (node->right->code_label), unsignedp);
6288	}
6289
6290      else if (node->right == 0 && node->left != 0)
6291	{
6292	  /* Just one subtree, on the left.  */
6293	  if (node->left->left || node->left->right
6294	      || !tree_int_cst_equal (node->left->low, node->left->high))
6295	    {
6296	      if (!node_has_high_bound (node, index_type))
6297		{
6298		  emit_cmp_and_jump_insns (index,
6299					   convert_modes
6300					   (mode, imode,
6301					    expand_expr (node->high, NULL_RTX,
6302							 VOIDmode, 0),
6303					    unsignedp),
6304					   GT, NULL_RTX, mode, unsignedp,
6305					   default_label);
6306		}
6307
6308	      emit_case_nodes (index, node->left, default_label, index_type);
6309	    }
6310	  else
6311	    /* We cannot process node->left normally
6312	       since we haven't ruled out the numbers greater than
6313	       this node's value.  So handle node->left explicitly.  */
6314	    do_jump_if_equal (index,
6315			      convert_modes
6316			      (mode, imode,
6317			       expand_expr (node->left->low, NULL_RTX,
6318					    VOIDmode, 0),
6319			       unsignedp),
6320			      label_rtx (node->left->code_label), unsignedp);
6321	}
6322    }
6323  else
6324    {
6325      /* Node is a range.  These cases are very similar to those for a single
6326	 value, except that we do not start by testing whether this node
6327	 is the one to branch to.  */
6328
6329      if (node->right != 0 && node->left != 0)
6330	{
6331	  /* Node has subtrees on both sides.
6332	     If the right-hand subtree is bounded,
6333	     test for it first, since we can go straight there.
6334	     Otherwise, we need to make a branch in the control structure,
6335	     then handle the two subtrees.  */
6336	  tree test_label = 0;
6337
6338	  if (node_is_bounded (node->right, index_type))
6339	    /* Right hand node is fully bounded so we can eliminate any
6340	       testing and branch directly to the target code.  */
6341	    emit_cmp_and_jump_insns (index,
6342				     convert_modes
6343				     (mode, imode,
6344				      expand_expr (node->high, NULL_RTX,
6345						   VOIDmode, 0),
6346				      unsignedp),
6347				     GT, NULL_RTX, mode, unsignedp,
6348				     label_rtx (node->right->code_label));
6349	  else
6350	    {
6351	      /* Right hand node requires testing.
6352		 Branch to a label where we will handle it later.  */
6353
6354	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6355	      emit_cmp_and_jump_insns (index,
6356				       convert_modes
6357				       (mode, imode,
6358					expand_expr (node->high, NULL_RTX,
6359						     VOIDmode, 0),
6360					unsignedp),
6361				       GT, NULL_RTX, mode, unsignedp,
6362				       label_rtx (test_label));
6363	    }
6364
6365	  /* Value belongs to this node or to the left-hand subtree.  */
6366
6367	  emit_cmp_and_jump_insns (index,
6368				   convert_modes
6369				   (mode, imode,
6370				    expand_expr (node->low, NULL_RTX,
6371						 VOIDmode, 0),
6372				    unsignedp),
6373				   GE, NULL_RTX, mode, unsignedp,
6374				   label_rtx (node->code_label));
6375
6376	  /* Handle the left-hand subtree.  */
6377	  emit_case_nodes (index, node->left, default_label, index_type);
6378
6379	  /* If right node had to be handled later, do that now.  */
6380
6381	  if (test_label)
6382	    {
6383	      /* If the left-hand subtree fell through,
6384		 don't let it fall into the right-hand subtree.  */
6385	      emit_jump_if_reachable (default_label);
6386
6387	      expand_label (test_label);
6388	      emit_case_nodes (index, node->right, default_label, index_type);
6389	    }
6390	}
6391
6392      else if (node->right != 0 && node->left == 0)
6393	{
6394	  /* Deal with values to the left of this node,
6395	     if they are possible.  */
6396	  if (!node_has_low_bound (node, index_type))
6397	    {
6398	      emit_cmp_and_jump_insns (index,
6399				       convert_modes
6400				       (mode, imode,
6401					expand_expr (node->low, NULL_RTX,
6402						     VOIDmode, 0),
6403					unsignedp),
6404				       LT, NULL_RTX, mode, unsignedp,
6405				       default_label);
6406	    }
6407
6408	  /* Value belongs to this node or to the right-hand subtree.  */
6409
6410	  emit_cmp_and_jump_insns (index,
6411				   convert_modes
6412				   (mode, imode,
6413				    expand_expr (node->high, NULL_RTX,
6414						 VOIDmode, 0),
6415				    unsignedp),
6416				   LE, NULL_RTX, mode, unsignedp,
6417				   label_rtx (node->code_label));
6418
6419	  emit_case_nodes (index, node->right, default_label, index_type);
6420	}
6421
6422      else if (node->right == 0 && node->left != 0)
6423	{
6424	  /* Deal with values to the right of this node,
6425	     if they are possible.  */
6426	  if (!node_has_high_bound (node, index_type))
6427	    {
6428	      emit_cmp_and_jump_insns (index,
6429				       convert_modes
6430				       (mode, imode,
6431					expand_expr (node->high, NULL_RTX,
6432						     VOIDmode, 0),
6433					unsignedp),
6434				       GT, NULL_RTX, mode, unsignedp,
6435				       default_label);
6436	    }
6437
6438	  /* Value belongs to this node or to the left-hand subtree.  */
6439
6440	  emit_cmp_and_jump_insns (index,
6441				   convert_modes
6442				   (mode, imode,
6443				    expand_expr (node->low, NULL_RTX,
6444						 VOIDmode, 0),
6445				    unsignedp),
6446				   GE, NULL_RTX, mode, unsignedp,
6447				   label_rtx (node->code_label));
6448
6449	  emit_case_nodes (index, node->left, default_label, index_type);
6450	}
6451
6452      else
6453	{
6454	  /* Node has no children so we check low and high bounds to remove
6455	     redundant tests.  Only one of the bounds can exist,
6456	     since otherwise this node is bounded--a case tested already.  */
6457	  int high_bound = node_has_high_bound (node, index_type);
6458	  int low_bound = node_has_low_bound (node, index_type);
6459
6460	  if (!high_bound && low_bound)
6461	    {
6462	      emit_cmp_and_jump_insns (index,
6463				       convert_modes
6464				       (mode, imode,
6465					expand_expr (node->high, NULL_RTX,
6466						     VOIDmode, 0),
6467					unsignedp),
6468				       GT, NULL_RTX, mode, unsignedp,
6469				       default_label);
6470	    }
6471
6472	  else if (!low_bound && high_bound)
6473	    {
6474	      emit_cmp_and_jump_insns (index,
6475				       convert_modes
6476				       (mode, imode,
6477					expand_expr (node->low, NULL_RTX,
6478						     VOIDmode, 0),
6479					unsignedp),
6480				       LT, NULL_RTX, mode, unsignedp,
6481				       default_label);
6482	    }
6483	  else if (!low_bound && !high_bound)
6484	    {
6485	      /* Widen LOW and HIGH to the same width as INDEX.  */
6486	      tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
6487	      tree low = build1 (CONVERT_EXPR, type, node->low);
6488	      tree high = build1 (CONVERT_EXPR, type, node->high);
6489	      rtx low_rtx, new_index, new_bound;
6490
6491	      /* Instead of doing two branches, emit one unsigned branch for
6492		 (index-low) > (high-low).  */
6493	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6494	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6495					       NULL_RTX, unsignedp,
6496					       OPTAB_WIDEN);
6497	      new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6498						    high, low)),
6499				       NULL_RTX, mode, 0);
6500
6501	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6502				       mode, 1, default_label);
6503	    }
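	  /* For the range 10 .. 20, say, the code above emits the single
	     unsigned test (index - 10) > 10 against DEFAULT_LABEL instead
	     of separate index < 10 and index > 20 tests.  */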
6504
6505	  emit_jump (label_rtx (node->code_label));
6506	}
6507    }
6508}
6509
6510#include "gt-stmt.h"
6511