/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
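
/* For illustration only (a sketch of that calling sequence, not code
   from this file): a C front end expanding

	if (cond) then_stmt;

   would make roughly these calls while parsing:

	expand_start_cond (cond, 0);	-- emits the test of COND
	... expand THEN_STMT here ...
	expand_end_cond ();		-- emits the end-of-if label
*/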

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Filename and line number of last line-number note,
   whether we actually emitted it or not.  */
char *emit_filename;
int emit_lineno;

/* Nonzero if within a ({...}) grouping, in which case we must
   always compute a value for each expr-stmt in case it is the last one.  */

int expr_stmts_for_value;

/* Each time we expand an expression-statement,
   record the expr's type and its RTL value here.  */

static tree last_expr_type;
static rtx last_expr_value;

/* Each time we expand the end of a binding contour (in `expand_end_bindings')
   and we emit a new NOTE_INSN_BLOCK_END note, we save a pointer to it here.
   This is used by the `remember_end_note' function to record the endpoint
   of each generated block in its associated BLOCK node.  */

static rtx last_block_end_note;

/* Number of binding contours started so far in this function.  */

int block_start_count;

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */

struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;
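
/* For example (illustrative only): given the C statement

	switch (x) { case 1: ...  case 4 ... 7: ...  default: ... }

   two case_node entries are built, one with low == high == 1 and one
   with low == 4 and high == 7.  The default label is not a case_node;
   it is recorded separately in the case_stmt nesting data below.  */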

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short *cost_table;
static int use_cost_table;

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */
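
/* For illustration (a hypothetical snapshot, not code from this file):
   while expanding

	while (c) { if (t) ...; }

   the stacks just inside the `if' would look roughly like

	nesting_stack -> cond -> block -> loop -> (outer constructs)
	cond_stack    -> cond
	block_stack   -> block
	loop_stack    -> loop

   and a `break' would exit via the innermost construct on these
   stacks whose `exit_label' is non-null.  */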

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int function_call_count;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups.  It must be at the end of, or
	     after, the start of the last unconditional cleanup, and
	     before any conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Number of range exprs in case statement.  */
	  int num_ranges;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Chain of all pending binding contours.  */
struct nesting *block_stack;

/* If any new stacks are added here, add them to POPSTACKS too.  */

/* Chain of all pending binding contours that restore stack levels
   or have cleanups.  */
struct nesting *stack_block_stack;

/* Chain of all pending conditional statements.  */
struct nesting *cond_stack;

/* Chain of all pending loops.  */
struct nesting *loop_stack;

/* Chain of all pending case or switch statements.  */
struct nesting *case_stack;

/* Separate chain including all of the above,
   chained through the `all' field.  */
struct nesting *nesting_stack;

/* Number of entries on nesting_stack now.  */
int nesting_depth;

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
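
/* For example (illustrative): `expand_end_loop' finishes a loop with
   roughly

	POPSTACK (loop_stack);

   which frees the loop's own entry and any entries still on
   nesting_stack that are nested more deeply than it.  */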

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

static struct goto_fixup *goto_fixup_chain;
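
/* Example (for illustration): in

	{ char buf[n]; ... goto out; ... }
	...
      out: ;

   the goto is seen while `out' is still undefined, and leaving the
   block must restore the stack space that `buf' occupies; so
   expand_fixup records the jump on goto_fixup_chain, and fixup_gotos
   later inserts the stack-restore insns just before the jump.  */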

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;


static int n_occurrences		PROTO((int, const char *));
static void expand_goto_internal	PROTO((tree, rtx, rtx));
static int expand_fixup			PROTO((tree, rtx, rtx));
static rtx expand_nl_handler_label	PROTO((rtx, rtx));
static void expand_nl_goto_receiver	PROTO((void));
static void expand_nl_goto_receivers	PROTO((struct nesting *));
static void fixup_gotos			PROTO((struct nesting *, rtx, tree,
					       rtx, int));
static void expand_null_return_1	PROTO((rtx, int));
static void expand_value_return		PROTO((rtx));
static int tail_recursion_args		PROTO((tree, tree));
static void expand_cleanups		PROTO((tree, tree, int, int));
static void check_seenlabel		PROTO((void));
static void do_jump_if_equal		PROTO((rtx, rtx, rtx, int));
static int estimate_case_costs		PROTO((case_node_ptr));
static void group_case_nodes		PROTO((case_node_ptr));
static void balance_case_nodes		PROTO((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PROTO((case_node_ptr, tree));
static int node_has_high_bound		PROTO((case_node_ptr, tree));
static int node_is_bounded		PROTO((case_node_ptr, tree));
static void emit_jump_if_reachable	PROTO((rtx));
static void emit_case_nodes		PROTO((rtx, case_node_ptr, rtx, tree));
static int add_case_node		PROTO((tree, tree, tree, tree *));
static struct case_node *case_tree2list	PROTO((case_node *, case_node *));

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
  init_eh ();
}

void
init_stmt_for_function ()
{
  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;

  init_eh_for_function ();
}

void
save_stmt_status (p)
     struct function *p;
{
  p->block_stack = block_stack;
  p->stack_block_stack = stack_block_stack;
  p->cond_stack = cond_stack;
  p->loop_stack = loop_stack;
  p->case_stack = case_stack;
  p->nesting_stack = nesting_stack;
  p->nesting_depth = nesting_depth;
  p->block_start_count = block_start_count;
  p->last_expr_type = last_expr_type;
  p->last_expr_value = last_expr_value;
  p->expr_stmts_for_value = expr_stmts_for_value;
  p->emit_filename = emit_filename;
  p->emit_lineno = emit_lineno;
  p->goto_fixup_chain = goto_fixup_chain;
  save_eh_status (p);
}

void
restore_stmt_status (p)
     struct function *p;
{
  block_stack = p->block_stack;
  stack_block_stack = p->stack_block_stack;
  cond_stack = p->cond_stack;
  loop_stack = p->loop_stack;
  case_stack = p->case_stack;
  nesting_stack = p->nesting_stack;
  nesting_depth = p->nesting_depth;
  block_start_count = p->block_start_count;
  last_expr_type = p->last_expr_type;
  last_expr_value = p->last_expr_value;
  expr_stmts_for_value = p->expr_stmts_for_value;
  emit_filename = p->emit_filename;
  emit_lineno = p->emit_lineno;
  goto_fixup_chain = p->goto_fixup_chain;
  restore_eh_status (p);
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (DECL_RTL (label))
    return DECL_RTL (label);

  return DECL_RTL (label) = gen_label_rtx ();
}

/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  /* Be sure the function is executable.  */
  if (current_function_check_memory_usage)
    emit_library_call (chkr_check_exec_libfunc, 1,
		       VOIDmode, 1, x, ptr_mode);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}
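
/* For example (illustrative): GNU C's computed goto

	goto *dispatch[i];

   reaches this function with EXP being the pointer expression
   `dispatch[i]'.  */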

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) oballoc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx temp, handler_slot;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->nonlocal_goto_handler_slots;
      for (link = p->nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (lookup_static_chain (label),
				      copy_rtx (handler_slot),
				      copy_rtx (p->nonlocal_goto_stack_level),
				      label_ref));
      else
#endif
	{
	  rtx addr;

	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, lookup_static_chain (label));

	  /* We have now loaded the frame pointer hardware register with
	     the address that corresponds to the start of the virtual
	     stack vars.  So replace virtual_stack_vars_rtx in all
	     addresses we use with hard_frame_pointer_rtx.  */

	  /* Get addr of containing function's current nonlocal goto handler,
	     which will do any cleanups and then jump to the label.  */
	  addr = copy_rtx (handler_slot);
	  temp = copy_to_reg (replace_rtx (addr, virtual_stack_vars_rtx,
					   hard_frame_pointer_rtx));

	  /* Restore the stack pointer.  Note this uses fp just restored.  */
	  addr = p->nonlocal_goto_stack_level;
	  if (addr)
	    addr = replace_rtx (copy_rtx (addr),
				virtual_stack_vars_rtx,
				hard_frame_pointer_rtx);

	  emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency; not clear if
	     really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (temp);
	}
     }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be the CODE_LABEL rtx for BODY.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();
	  emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) oballoc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BEGIN_BLOCK and NOTE_INSN_END_BLOCK notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
        register rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
	rtx start;

        start_sequence ();
        pushlevel (0);
        start = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
	fixup->before_jump = emit_note (NULL_PTR, NOTE_INSN_DELETED);
        last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
        fixup->context = poplevel (1, 0, 0);  /* Create the BLOCK node now! */
        end_sequence ();
        emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}



/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL_PTR, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore on exiting this
   contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  register struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  register rtx cleanup_insns;

	  /* Get the first non-label after the label
	     this goto jumps to.  If that's before this scope begins,
	     we don't have a jump into the scope.  */
	  rtx after_label = f->target_rtl;
	  while (after_label != 0 && GET_CODE (after_label) == CODE_LABEL)
	    after_label = NEXT_INSN (after_label);

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      /* If AFTER_LABEL is 0, it means the jump goes to the end
		 of the rtl, which means it jumps into this scope.  */
	      && (after_label == 0
		  || INSN_UID (first_insn) < INSN_UID (after_label))
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level)
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);


	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   BODY is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  */

void
expand_asm (body)
     tree body;
{
  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  if (TREE_CODE (body) == ADDR_EXPR)
    body = TREE_OPERAND (body, 0);

  emit_insn (gen_rtx_ASM_INPUT (VOIDmode,
				TREE_STRING_POINTER (body)));
  last_expr_type = 0;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
   Each output or input has an expression in the TREE_VALUE and
   a constraint-string in the TREE_PURPOSE.
   CLOBBERS is a list of STRING_CST nodes each naming a hard register
   that is clobbered by this insn.

   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
   Some elements of OUTPUTS may be replaced with trees representing temporary
   values.  The caller should copy those temporary values to the originally
   specified lvalues.

   VOL nonzero means the insn is volatile; don't optimize it.  */
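
/* For example (illustrative; `foo' is a made-up mnemonic): the GNU C
   statement

	asm volatile ("foo %0,%1" : "=r" (x) : "g" (y) : "cc");

   arrives here with STRING holding "foo %0,%1", OUTPUTS a one-element
   list whose TREE_PURPOSE is the constraint string "=r" and whose
   TREE_VALUE is the lvalue `x', INPUTS likewise pairing "g" with `y',
   CLOBBERS a list containing the string "cc", and VOL nonzero.  */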

void
expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
     tree string, outputs, inputs, clobbers;
     int vol;
     char *filename;
     int line;
{
  rtvec argvec, constraints;
  rtx body;
  int ninputs = list_length (inputs);
  int noutputs = list_length (outputs);
  int ninout = 0;
  int nclobbers;
  tree tail;
  register int i;
  /* Vector of RTX's of evaluated output operands.  */
  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
  enum machine_mode *inout_mode
    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
  /* The insn we have emitted.  */
  rtx insn;

  /* An ASM with no outputs needs to be treated as volatile, for now.  */
  if (noutputs == 0)
    vol = 1;

  if (current_function_check_memory_usage)
    {
      error ("`asm' cannot be used with `-fcheck-memory-usage'");
      return;
    }

  /* Count the number of meaningful clobbered registers, ignoring what
     we would ignore later.  */
  nclobbers = 0;
  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
    {
      char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
      i = decode_reg_name (regname);
      if (i >= 0 || i == -4)
	++nclobbers;
      else if (i == -2)
	error ("unknown register name `%s' in `asm'", regname);
    }

  last_expr_type = 0;

  /* Check that the number of alternatives is constant across all
     operands.  */
  if (outputs || inputs)
    {
      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
      int nalternatives = n_occurrences (',', TREE_STRING_POINTER (tmp));
      tree next = inputs;

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in `asm'");
	  return;
	}

      tmp = outputs;
      while (tmp)
	{
	  char *constraint = TREE_STRING_POINTER (TREE_PURPOSE (tmp));
	  if (n_occurrences (',', constraint) != nalternatives)
	    {
	      error ("operand constraints for `asm' differ in number of alternatives");
	      return;
	    }
	  if (TREE_CHAIN (tmp))
	    tmp = TREE_CHAIN (tmp);
	  else
	    tmp = next, next = 0;
	}
    }

  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
    {
      tree val = TREE_VALUE (tail);
      tree type = TREE_TYPE (val);
      char *constraint;
      char *p;
      int c_len;
      int j;
      int is_inout = 0;
      int allows_reg = 0;
      int allows_mem = 0;

      /* If there's an erroneous arg, emit no insn.  */
      if (TREE_TYPE (val) == error_mark_node)
	return;

      /* Make sure constraint has `=' and does not have `+'.  Also, see
	 if it allows any register.  Be liberal on the latter test, since
	 the worst that happens if we get it wrong is we issue an error
	 message.  */

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));

      /* Allow the `=' or `+' to not be at the beginning of the string,
	 since it wasn't explicitly documented that way, and there is a
	 large body of code that puts it last.  Swap the character to
	 the front, so as not to uglify any place else.  */
      switch (c_len)
	{
	default:
	  if ((p = strchr (constraint, '=')) != NULL)
	    break;
	  if ((p = strchr (constraint, '+')) != NULL)
	    break;
	case 0:
	  error ("output operand constraint lacks `='");
	  return;
	}

      if (p != constraint)
	{
	  j = *p;
	  bcopy (constraint, constraint+1, p-constraint);
	  *constraint = j;

	  warning ("output constraint `%c' for operand %d is not at the beginning", j, i);
	}

      is_inout = constraint[0] == '+';
      /* Replace '+' with '='.  */
      constraint[0] = '=';
      /* Make sure we can specify the matching operand.  */
      if (is_inout && i > 9)
	{
	  error ("output operand constraint %d contains `+'", i);
	  return;
	}

      for (j = 1; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':
	  case '=':
	    error ("operand constraint contains '+' or '=' at illegal position.");
	    return;

	  case '%':
	    if (i + 1 == ninputs + noutputs)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case '?':  case '!':  case '*':  case '&':
	  case 'E':  case 'F':  case 'G':  case 'H':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    error ("matching constraint not valid in output operand");
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	    /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	       excepting those that expand_call created.  So match memory
	       and hope.  */
	    allows_mem = 1;
	    break;

	  case 'g':  case 'X':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;

	  case 'p': case 'r':
	  default:
	    allows_reg = 1;
	    break;
	  }

      /* If an output operand is not a decl or indirect ref and our constraint
	 allows a register, make a temporary to act as an intermediate.
	 Make the asm insn write into that, then our caller will copy it to
	 the real output operand.  Likewise for promoted variables.  */

      real_output_rtx[i] = NULL_RTX;
      if ((TREE_CODE (val) == INDIRECT_REF
	   && allows_mem)
	  || (TREE_CODE_CLASS (TREE_CODE (val)) == 'd'
	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
	      && ! (GET_CODE (DECL_RTL (val)) == REG
		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
	  || ! allows_reg
	  || is_inout)
	{
	  if (! allows_reg)
	    mark_addressable (TREE_VALUE (tail));

	  output_rtx[i]
	    = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode,
			   EXPAND_MEMORY_USE_WO);

	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
	    error ("output number %d not directly addressable", i);
	  if (! allows_mem && GET_CODE (output_rtx[i]) == MEM)
	    {
	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
	      if (is_inout)
		emit_move_insn (output_rtx[i], real_output_rtx[i]);
	    }
	}
      else
	{
	  output_rtx[i] = assign_temp (type, 0, 0, 1);
	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
	}

      if (is_inout)
	{
	  inout_mode[ninout] = TYPE_MODE (TREE_TYPE (TREE_VALUE (tail)));
	  inout_opnum[ninout++] = i;
	}
    }
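
  /* Illustrative note (not original to this file): an output operand
     written "+r" (x) has just been rewritten above as the output
     constraint "=r", and an extra matching input with constraint "0"
     will be added for it below, so that `x' is both read and written
     by the asm.  */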

  ninputs += ninout;
  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
    {
      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
      return;
    }

  /* Make vectors for the expression-rtx and constraint strings.  */

  argvec = rtvec_alloc (ninputs);
  constraints = rtvec_alloc (ninputs);

  body = gen_rtx_ASM_OPERANDS (VOIDmode,
			       TREE_STRING_POINTER (string), "", 0, argvec,
			       constraints, filename, line);

  MEM_VOLATILE_P (body) = vol;

  /* Eval the inputs and put them into ARGVEC.
     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */

  i = 0;
  for (tail = inputs; tail; tail = TREE_CHAIN (tail))
    {
      int j;
      int allows_reg = 0, allows_mem = 0;
      char *constraint, *orig_constraint;
      int c_len;
      rtx op;

      /* If there's an erroneous arg, emit no insn,
	 because the ASM_INPUT would get VOIDmode
	 and that could cause a crash in reload.  */
      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
	return;

      /* ??? Can this happen, and does the error message make any sense? */
      if (TREE_PURPOSE (tail) == NULL_TREE)
	{
	  error ("hard register `%s' listed as input operand to `asm'",
		 TREE_STRING_POINTER (TREE_VALUE (tail)) );
	  return;
	}

      c_len = TREE_STRING_LENGTH (TREE_PURPOSE (tail)) - 1;
      constraint = TREE_STRING_POINTER (TREE_PURPOSE (tail));
      orig_constraint = constraint;

      /* Make sure constraint has neither `=', `+', nor '&'.  */

      for (j = 0; j < c_len; j++)
	switch (constraint[j])
	  {
	  case '+':  case '=':  case '&':
	    if (constraint == orig_constraint)
	      {
	        error ("input operand constraint contains `%c'", constraint[j]);
	        return;
	      }
	    break;

	  case '%':
	    if (constraint == orig_constraint
		&& i + 1 == ninputs - ninout)
	      {
		error ("`%%' constraint used with last operand");
		return;
	      }
	    break;

	  case 'V':  case 'm':  case 'o':
	    allows_mem = 1;
	    break;

	  case '<':  case '>':
	  case '?':  case '!':  case '*':
	  case 'E':  case 'F':  case 'G':  case 'H':  case 'X':
	  case 's':  case 'i':  case 'n':
	  case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
	  case 'N':  case 'O':  case 'P':  case ',':
#ifdef EXTRA_CONSTRAINT
	  case 'Q':  case 'R':  case 'S':  case 'T':  case 'U':
#endif
	    break;

	    /* Whether or not a numeric constraint allows a register is
	       decided by the matching constraint, and so there is no need
	       to do anything special with them.  We must handle them in
	       the default case, so that we don't unnecessarily force
	       operands to memory.  */
	  case '0':  case '1':  case '2':  case '3':  case '4':
	  case '5':  case '6':  case '7':  case '8':  case '9':
	    if (constraint[j] >= '0' + noutputs)
	      {
		error
		  ("matching constraint references invalid operand number");
		return;
	      }

	    /* Try and find the real constraint for this dup.  */
	    if ((j == 0 && c_len == 1)
		|| (j == 1 && c_len == 2 && constraint[0] == '%'))
	      {
		tree o = outputs;
		for (j = constraint[j] - '0'; j > 0; --j)
		  o = TREE_CHAIN (o);

		c_len = TREE_STRING_LENGTH (TREE_PURPOSE (o)) - 1;
		constraint = TREE_STRING_POINTER (TREE_PURPOSE (o));
		j = 0;
		break;
	      }

	    /* ... fall through ... */

	  case 'p':  case 'r':
	  default:
	    allows_reg = 1;
	    break;

	  case 'g':
	    allows_reg = 1;
	    allows_mem = 1;
	    break;
	  }

      if (! allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (tail));

      op = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);

      if (asm_operand_ok (op, constraint) <= 0)
	{
	  if (allows_reg)
	    op = force_reg (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))), op);
	  else if (!allows_mem)
	    warning ("asm operand %d probably doesn't match constraints", i);
	  else if (CONSTANT_P (op))
	    op = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
				  op);
	  else if (GET_CODE (op) == REG
		   || GET_CODE (op) == SUBREG
		   || GET_CODE (op) == CONCAT)
	    {
	      tree type = TREE_TYPE (TREE_VALUE (tail));
	      rtx memloc = assign_temp (type, 1, 1, 1);

	      emit_move_insn (memloc, op);
	      op = memloc;
	    }
	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
	    /* We won't recognize volatile memory as available as a
	       memory_operand at this point.  Ignore it.  */
1519	    ;
1520	  else if (queued_subexp_p (op))
1521	    ;
1522	  else
1523	    /* ??? Leave this only until we have experience with what
1524	       happens in combine and elsewhere when constraints are
1525	       not satisfied.  */
1526	    warning ("asm operand %d probably doesn't match constraints", i);
1527	}
1528      XVECEXP (body, 3, i) = op;
1529
1530      XVECEXP (body, 4, i)      /* constraints */
1531	= gen_rtx_ASM_INPUT (TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
1532			     orig_constraint);
1533      i++;
1534    }
1535
1536  /* Protect all the operands from the queue,
1537     now that they have all been evaluated.  */
1538
1539  for (i = 0; i < ninputs - ninout; i++)
1540    XVECEXP (body, 3, i) = protect_from_queue (XVECEXP (body, 3, i), 0);
1541
1542  for (i = 0; i < noutputs; i++)
1543    output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1544
1545  /* For in-out operands, copy output rtx to input rtx. */
1546  for (i = 0; i < ninout; i++)
1547    {
1548      static char match[9+1][2]
1549	= {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
1550      int j = inout_opnum[i];
1551
1552      XVECEXP (body, 3, ninputs - ninout + i)      /* argvec */
1553	= output_rtx[j];
1554      XVECEXP (body, 4, ninputs - ninout + i)      /* constraints */
1555	= gen_rtx_ASM_INPUT (inout_mode[j], match[j]);
1556    }
1557
1558  /* Now, for each output, construct an rtx
1559     (set OUTPUT (asm_operands INSN OUTPUTNUMBER OUTPUTCONSTRAINT
1560			       ARGVEC CONSTRAINTS))
1561     If there is more than one, put them inside a PARALLEL.  */
1562
1563  if (noutputs == 1 && nclobbers == 0)
1564    {
1565      XSTR (body, 1) = TREE_STRING_POINTER (TREE_PURPOSE (outputs));
1566      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1567    }
1568  else if (noutputs == 0 && nclobbers == 0)
1569    {
1570      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
1571      insn = emit_insn (body);
1572    }
1573  else
1574    {
1575      rtx obody = body;
1576      int num = noutputs;
1577      if (num == 0) num = 1;
1578      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1579
1580      /* For each output operand, store a SET.  */
1581
1582      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1583	{
1584	  XVECEXP (body, 0, i)
1585	    = gen_rtx_SET (VOIDmode,
1586			   output_rtx[i],
1587			   gen_rtx_ASM_OPERANDS (VOIDmode,
1588						 TREE_STRING_POINTER (string),
1589						 TREE_STRING_POINTER (TREE_PURPOSE (tail)),
1590						 i, argvec, constraints,
1591						 filename, line));
1592	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1593	}
1594
1595      /* If there are no outputs (but there are some clobbers)
1596	 store the bare ASM_OPERANDS into the PARALLEL.  */
1597
1598      if (i == 0)
1599	XVECEXP (body, 0, i++) = obody;
1600
1601      /* Store (clobber REG) for each clobbered register specified.  */
1602
1603      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1604	{
1605	  char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1606	  int j = decode_reg_name (regname);
1607
1608	  if (j < 0)
1609	    {
1610	      if (j == -3)	/* `cc', which is not a register */
1611		continue;
1612
1613	      if (j == -4)	/* `memory', don't cache memory across asm */
1614		{
1615		  XVECEXP (body, 0, i++)
1616		    = gen_rtx_CLOBBER (VOIDmode,
1617				       gen_rtx_MEM (BLKmode,
1618						    gen_rtx_SCRATCH (VOIDmode)));
1619		  continue;
1620		}
1621
1622	      /* Ignore unknown register, error already signaled.  */
1623	      continue;
1624	    }
1625
1626	  /* Use QImode since that's guaranteed to clobber just one reg.  */
1627	  XVECEXP (body, 0, i++)
1628	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1629	}
1630
1631      insn = emit_insn (body);
1632    }
1633
1634  /* For any outputs that needed reloading into registers, spill them
1635     back to where they belong.  */
1636  for (i = 0; i < noutputs; ++i)
1637    if (real_output_rtx[i])
1638      emit_move_insn (real_output_rtx[i], output_rtx[i]);
1639
1640  free_temp_slots ();
1641}
1642
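/* A sketch (not part of the compiler itself) of a GNU C `asm' that
   exercises the in-out and clobber handling above, using x86-style
   assembler text:

       int val = 41;
       asm volatile ("incl %0" : "=r" (val) : "0" (val)
		     : "cc", "memory");

   The "0" input constraint is what feeds inout_opnum/inout_mode and
   the `match' strings in the in-out loop; "cc" is skipped when the
   clobbers are built, and "memory" becomes a clobber of
   (mem:BLK (scratch)).  */
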
1643/* Generate RTL to evaluate the expression EXP
1644   and remember it in case this is the VALUE in a ({... VALUE; }) construct.  */
1645
1646void
1647expand_expr_stmt (exp)
1648     tree exp;
1649{
1650  /* If -W, warn about statements with no side effects,
1651     except for an explicit cast to void (e.g. for assert()), and
1652     except inside a ({...}) where they may be useful.  */
1653  if (expr_stmts_for_value == 0 && exp != error_mark_node)
1654    {
1655      if (! TREE_SIDE_EFFECTS (exp) && (extra_warnings || warn_unused)
1656	  && !(TREE_CODE (exp) == CONVERT_EXPR
1657	       && TREE_TYPE (exp) == void_type_node))
1658	warning_with_file_and_line (emit_filename, emit_lineno,
1659				    "statement with no effect");
1660      else if (warn_unused)
1661	warn_if_unused_value (exp);
1662    }
1663
1664  /* If EXP is of function type and we are expanding statements for
1665     value, convert it to pointer-to-function.  */
1666  if (expr_stmts_for_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
1667    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
1668
1669  last_expr_type = TREE_TYPE (exp);
1670  last_expr_value = expand_expr (exp,
1671				 (expr_stmts_for_value
1672				  ? NULL_RTX : const0_rtx),
1673				 VOIDmode, 0);
1674
1675  /* If all we do is reference a volatile value in memory,
1676     copy it to a register to be sure it is actually touched.  */
1677  if (last_expr_value != 0 && GET_CODE (last_expr_value) == MEM
1678      && TREE_THIS_VOLATILE (exp))
1679    {
1680      if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode)
1681	;
1682      else if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
1683	copy_to_reg (last_expr_value);
1684      else
1685	{
1686	  rtx lab = gen_label_rtx ();
1687
1688	  /* Compare the value with itself to reference it.  */
1689	  emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
1690				   expand_expr (TYPE_SIZE (last_expr_type),
1691						NULL_RTX, VOIDmode, 0),
1692				   BLKmode, 0,
1693				   TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
1694				   lab);
1695	  emit_label (lab);
1696	}
1697    }
1698
1699  /* If this expression is part of a ({...}) and is in memory, we may have
1700     to preserve temporaries.  */
1701  preserve_temp_slots (last_expr_value);
1702
1703  /* Free any temporaries used to evaluate this expression.  Any temporary
1704     used as a result of this expression will already have been preserved
1705     above.  */
1706  free_temp_slots ();
1707
1708  emit_queue ();
1709}
1710
1711/* Warn if EXP contains any computations whose results are not used.
1712   Return 1 if a warning is printed; 0 otherwise.  */
1713
1714int
1715warn_if_unused_value (exp)
1716     tree exp;
1717{
1718  if (TREE_USED (exp))
1719    return 0;
1720
1721  switch (TREE_CODE (exp))
1722    {
1723    case PREINCREMENT_EXPR:
1724    case POSTINCREMENT_EXPR:
1725    case PREDECREMENT_EXPR:
1726    case POSTDECREMENT_EXPR:
1727    case MODIFY_EXPR:
1728    case INIT_EXPR:
1729    case TARGET_EXPR:
1730    case CALL_EXPR:
1731    case METHOD_CALL_EXPR:
1732    case RTL_EXPR:
1733    case TRY_CATCH_EXPR:
1734    case WITH_CLEANUP_EXPR:
1735    case EXIT_EXPR:
1736      /* We don't warn about COND_EXPR because it may be a useful
1737	 construct if either arm contains a side effect.  */
1738    case COND_EXPR:
1739      return 0;
1740
1741    case BIND_EXPR:
1742      /* For a binding, warn if no side effect within it.  */
1743      return warn_if_unused_value (TREE_OPERAND (exp, 1));
1744
1745    case SAVE_EXPR:
1746      return warn_if_unused_value (TREE_OPERAND (exp, 1));
1747
1748    case TRUTH_ORIF_EXPR:
1749    case TRUTH_ANDIF_EXPR:
1750      /* In && or ||, warn if 2nd operand has no side effect.  */
1751      return warn_if_unused_value (TREE_OPERAND (exp, 1));
1752
1753    case COMPOUND_EXPR:
1754      if (TREE_NO_UNUSED_WARNING (exp))
1755	return 0;
1756      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
1757	return 1;
1758      /* Let people do `(foo (), 0)' without a warning.  */
1759      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
1760	return 0;
1761      return warn_if_unused_value (TREE_OPERAND (exp, 1));
1762
1763    case NOP_EXPR:
1764    case CONVERT_EXPR:
1765    case NON_LVALUE_EXPR:
1766      /* Don't warn about values cast to void.  */
1767      if (TREE_TYPE (exp) == void_type_node)
1768	return 0;
1769      /* Don't warn about conversions not explicit in the user's program.  */
1770      if (TREE_NO_UNUSED_WARNING (exp))
1771	return 0;
1772      /* Assignment to a cast usually results in a cast of a modify.
1773	 Don't complain about that.  There can be an arbitrary number of
1774	 casts before the modify, so we must loop until we find the first
1775	 non-cast expression and then test to see if that is a modify.  */
1776      {
1777	tree tem = TREE_OPERAND (exp, 0);
1778
1779	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
1780	  tem = TREE_OPERAND (tem, 0);
1781
1782	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
1783	    || TREE_CODE (tem) == CALL_EXPR)
1784	  return 0;
1785      }
1786      goto warn;
1787
1788    case INDIRECT_REF:
1789      /* Don't warn about automatic dereferencing of references, since
1790	 the user cannot control it.  */
1791      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
1792	return warn_if_unused_value (TREE_OPERAND (exp, 0));
1793      /* ... fall through ...  */
1794
1795    default:
1796      /* Referencing a volatile value is a side effect, so don't warn.  */
1797      if ((TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
1798	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
1799	  && TREE_THIS_VOLATILE (exp))
1800	return 0;
1801    warn:
1802      warning_with_file_and_line (emit_filename, emit_lineno,
1803				  "value computed is not used");
1804      return 1;
1805    }
1806}
1807
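/* Informally, with -W the cases above work out like this:

       x + 1;           warns that the value computed is not used
       (void) f ();     no warning: explicit cast to void
       (foo (), 0);     no warning: constant second operand of a comma
       *p;              no warning when *p is volatile

   (x, p, f and foo are arbitrary examples.)  */
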
1808/* Clear out the memory of the last expression evaluated.  */
1809
1810void
1811clear_last_expr ()
1812{
1813  last_expr_type = 0;
1814}
1815
1816/* Begin a statement which will return a value.
1817   Return the RTL_EXPR for this statement expr.
1818   The caller must save that value and pass it to expand_end_stmt_expr.  */
1819
1820tree
1821expand_start_stmt_expr ()
1822{
1823  int momentary;
1824  tree t;
1825
1826  /* Make the RTL_EXPR node temporary, not momentary,
1827     so that rtl_expr_chain doesn't become garbage.  */
1828  momentary = suspend_momentary ();
1829  t = make_node (RTL_EXPR);
1830  resume_momentary (momentary);
1831  do_pending_stack_adjust ();
1832  start_sequence_for_rtl_expr (t);
1833  NO_DEFER_POP;
1834  expr_stmts_for_value++;
1835  return t;
1836}
1837
1838/* Restore the previous state at the end of a statement that returns a value.
1839   Returns a tree node representing the statement's value and the
1840   insns to compute the value.
1841
1842   The nodes of that expression have been freed by now, so we cannot use them.
1843   But we don't want to do that anyway; the expression has already been
1844   evaluated and now we just want to use the value.  So generate a RTL_EXPR
1845   with the proper type and RTL value.
1846
1847   If the last substatement was not an expression,
1848   return something with type `void'.  */
1849
1850tree
1851expand_end_stmt_expr (t)
1852     tree t;
1853{
1854  OK_DEFER_POP;
1855
1856  if (last_expr_type == 0)
1857    {
1858      last_expr_type = void_type_node;
1859      last_expr_value = const0_rtx;
1860    }
1861  else if (last_expr_value == 0)
1862    /* There are some cases where this can happen, such as when the
1863       statement is of void type.  */
1864    last_expr_value = const0_rtx;
1865  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
1866    /* Remove any possible QUEUED.  */
1867    last_expr_value = protect_from_queue (last_expr_value, 0);
1868
1869  emit_queue ();
1870
1871  TREE_TYPE (t) = last_expr_type;
1872  RTL_EXPR_RTL (t) = last_expr_value;
1873  RTL_EXPR_SEQUENCE (t) = get_insns ();
1874
1875  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
1876
1877  end_sequence ();
1878
1879  /* Don't consider deleting this expr or containing exprs at tree level.  */
1880  TREE_SIDE_EFFECTS (t) = 1;
1881  /* Propagate volatility of the actual RTL expr.  */
1882  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
1883
1884  last_expr_type = 0;
1885  expr_stmts_for_value--;
1886
1887  return t;
1888}
1889
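/* A sketch of how the pair above is used: for the GNU C statement
   expression

       y = ({ int t = f (x); t * 2; });

   the front end calls expand_start_stmt_expr before expanding the
   braced body (saving the RTL_EXPR it returns), expands each
   statement inside with expr_stmts_for_value nonzero so that
   last_expr_value is kept up to date, and finally passes the saved
   node to expand_end_stmt_expr, whose result supplies the value of
   the whole construct.  */
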
1890/* Generate RTL for the start of an if-then.  COND is the expression
1891   whose truth should be tested.
1892
1893   If EXITFLAG is nonzero, this conditional is visible to
1894   `exit_something'.  */
1895
1896void
1897expand_start_cond (cond, exitflag)
1898     tree cond;
1899     int exitflag;
1900{
1901  struct nesting *thiscond = ALLOC_NESTING ();
1902
1903  /* Make an entry on cond_stack for the cond we are entering.  */
1904
1905  thiscond->next = cond_stack;
1906  thiscond->all = nesting_stack;
1907  thiscond->depth = ++nesting_depth;
1908  thiscond->data.cond.next_label = gen_label_rtx ();
1909  /* Before we encounter an `else', we don't need a separate exit label
1910     unless there are supposed to be exit statements
1911     to exit this conditional.  */
1912  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
1913  thiscond->data.cond.endif_label = thiscond->exit_label;
1914  cond_stack = thiscond;
1915  nesting_stack = thiscond;
1916
1917  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
1918}
1919
1920/* Generate RTL between the then-clause and the elseif-clause
1921   of an if-then-elseif-....  */
1922
1923void
1924expand_start_elseif (cond)
1925     tree cond;
1926{
1927  if (cond_stack->data.cond.endif_label == 0)
1928    cond_stack->data.cond.endif_label = gen_label_rtx ();
1929  emit_jump (cond_stack->data.cond.endif_label);
1930  emit_label (cond_stack->data.cond.next_label);
1931  cond_stack->data.cond.next_label = gen_label_rtx ();
1932  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1933}
1934
1935/* Generate RTL between the then-clause and the else-clause
1936   of an if-then-else.  */
1937
1938void
1939expand_start_else ()
1940{
1941  if (cond_stack->data.cond.endif_label == 0)
1942    cond_stack->data.cond.endif_label = gen_label_rtx ();
1943
1944  emit_jump (cond_stack->data.cond.endif_label);
1945  emit_label (cond_stack->data.cond.next_label);
1946  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
1947}
1948
1949/* After calling expand_start_else, turn this "else" into an "else if"
1950   by providing another condition.  */
1951
1952void
1953expand_elseif (cond)
1954     tree cond;
1955{
1956  cond_stack->data.cond.next_label = gen_label_rtx ();
1957  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
1958}
1959
1960/* Generate RTL for the end of an if-then.
1961   Pop the record for it off of cond_stack.  */
1962
1963void
1964expand_end_cond ()
1965{
1966  struct nesting *thiscond = cond_stack;
1967
1968  do_pending_stack_adjust ();
1969  if (thiscond->data.cond.next_label)
1970    emit_label (thiscond->data.cond.next_label);
1971  if (thiscond->data.cond.endif_label)
1972    emit_label (thiscond->data.cond.endif_label);
1973
1974  POPSTACK (cond_stack);
1975  last_expr_type = 0;
1976}
1977
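/* Taken together, a front end expands

       if (a) s1; else if (b) s2; else s3;

   with roughly this sequence of calls (a sketch, not a verbatim
   trace):

       expand_start_cond (a, 0);
       ... expand s1 ...
       expand_start_elseif (b);
       ... expand s2 ...
       expand_start_else ();
       ... expand s3 ...
       expand_end_cond ();  */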
1978
1979
1980/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
1981   loop should be exited by `exit_something'.  This is a loop for which
1982   `expand_continue' will jump to the top of the loop.
1983
1984   Make an entry on loop_stack to record the labels associated with
1985   this loop.  */
1986
1987struct nesting *
1988expand_start_loop (exit_flag)
1989     int exit_flag;
1990{
1991  register struct nesting *thisloop = ALLOC_NESTING ();
1992
1993  /* Make an entry on loop_stack for the loop we are entering.  */
1994
1995  thisloop->next = loop_stack;
1996  thisloop->all = nesting_stack;
1997  thisloop->depth = ++nesting_depth;
1998  thisloop->data.loop.start_label = gen_label_rtx ();
1999  thisloop->data.loop.end_label = gen_label_rtx ();
2000  thisloop->data.loop.alt_end_label = 0;
2001  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2002  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2003  loop_stack = thisloop;
2004  nesting_stack = thisloop;
2005
2006  do_pending_stack_adjust ();
2007  emit_queue ();
2008  emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
2009  emit_label (thisloop->data.loop.start_label);
2010
2011  return thisloop;
2012}
2013
2014/* Like expand_start_loop but for a loop where the continuation point
2015   (for expand_continue_loop) will be specified explicitly.  */
2016
2017struct nesting *
2018expand_start_loop_continue_elsewhere (exit_flag)
2019     int exit_flag;
2020{
2021  struct nesting *thisloop = expand_start_loop (exit_flag);
2022  loop_stack->data.loop.continue_label = gen_label_rtx ();
2023  return thisloop;
2024}
2025
2026/* Specify the continuation point for a loop started with
2027   expand_start_loop_continue_elsewhere.
2028   Use this at the point in the code to which a continue statement
2029   should jump.  */
2030
2031void
2032expand_loop_continue_here ()
2033{
2034  do_pending_stack_adjust ();
2035  emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
2036  emit_label (loop_stack->data.loop.continue_label);
2037}
2038
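/* As an informal sketch, a C statement

       for (init; cond; incr) body;

   can be expanded with this family of calls so that `continue'
   reaches the increment code instead of the loop top:

       init;
       expand_start_loop_continue_elsewhere (1);
       expand_exit_loop_if_false (0, cond);
       body;
       expand_loop_continue_here ();
       incr;
       expand_end_loop ();  */
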
2039/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
2040   Pop the block off of loop_stack.  */
2041
2042void
2043expand_end_loop ()
2044{
2045  rtx start_label = loop_stack->data.loop.start_label;
2046  rtx insn = get_last_insn ();
2047  int needs_end_jump = 1;
2048
2049  /* Mark the continue-point at the top of the loop if none elsewhere.  */
2050  if (start_label == loop_stack->data.loop.continue_label)
2051    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2052
2053  do_pending_stack_adjust ();
2054
2055  /* If optimizing, perhaps reorder the loop.
2056     First, try to use a condjump near the end.
2057     expand_exit_loop_if_false ends loops with unconditional jumps,
2058     like this:
2059
2060     if (test) goto label;
2061     optional: cleanup
2062     goto loop_stack->data.loop.end_label
2063     barrier
2064     label:
2065
2066     If we find such a pattern, we can end the loop earlier.  */
2067
2068  if (optimize
2069      && GET_CODE (insn) == CODE_LABEL
2070      && LABEL_NAME (insn) == NULL
2071      && GET_CODE (PREV_INSN (insn)) == BARRIER)
2072    {
2073      rtx label = insn;
2074      rtx jump = PREV_INSN (PREV_INSN (label));
2075
2076      if (GET_CODE (jump) == JUMP_INSN
2077	  && GET_CODE (PATTERN (jump)) == SET
2078	  && SET_DEST (PATTERN (jump)) == pc_rtx
2079	  && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
2080	  && (XEXP (SET_SRC (PATTERN (jump)), 0)
2081	      == loop_stack->data.loop.end_label))
2082	{
2083	  rtx prev;
2084
2085	  /* The test might be complex and reference LABEL multiple times,
2086	     like the loop in loop_iterations to set vtop.  To handle this,
2087	     we move LABEL.  */
2088	  insn = PREV_INSN (label);
2089	  reorder_insns (label, label, start_label);
2090
2091	  for (prev = PREV_INSN (jump); ; prev = PREV_INSN (prev))
2092	   {
2093	      /* We ignore line number notes, but if we see any other note,
2094		 in particular NOTE_INSN_BLOCK_*, NOTE_INSN_EH_REGION_*,
2095		 NOTE_INSN_LOOP_*, we disable this optimization.  */
2096	      if (GET_CODE (prev) == NOTE)
2097		{
2098		  if (NOTE_LINE_NUMBER (prev) < 0)
2099		    break;
2100		  continue;
2101		}
2102	      if (GET_CODE (prev) == CODE_LABEL)
2103		break;
2104	      if (GET_CODE (prev) == JUMP_INSN)
2105		{
2106		  if (GET_CODE (PATTERN (prev)) == SET
2107		      && SET_DEST (PATTERN (prev)) == pc_rtx
2108		      && GET_CODE (SET_SRC (PATTERN (prev))) == IF_THEN_ELSE
2109		      && (GET_CODE (XEXP (SET_SRC (PATTERN (prev)), 1))
2110			  == LABEL_REF)
2111		      && XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0) == label)
2112		    {
2113		      XEXP (XEXP (SET_SRC (PATTERN (prev)), 1), 0)
2114			= start_label;
2115		      emit_note_after (NOTE_INSN_LOOP_END, prev);
2116		      needs_end_jump = 0;
2117		    }
2118		  break;
2119		}
2120	   }
2121	}
2122    }
2123
2124  /* If the loop starts with a loop exit, roll that to the end where
2125     it will optimize together with the jump back.
2126
2127     We look for the conditional branch to the exit, except that once
2128     we find such a branch, we don't look past 30 instructions.
2129
2130     In more detail, if the loop presently looks like this (in pseudo-C):
2131
2132         start_label:
2133         if (test) goto end_label;
2134	 body;
2135	 goto start_label;
2136	 end_label:
2137
2138     transform it to look like:
2139
2140         goto start_label;
2141         newstart_label:
2142	 body;
2143	 start_label:
2144	 if (test) goto end_label;
2145	 goto newstart_label;
2146	 end_label:
2147
2148     Here, the `test' may actually consist of some reasonably complex
2149     code, terminating in a test.  */
2150
2151  if (optimize
2152      && needs_end_jump
2153      &&
2154      ! (GET_CODE (insn) == JUMP_INSN
2155	 && GET_CODE (PATTERN (insn)) == SET
2156	 && SET_DEST (PATTERN (insn)) == pc_rtx
2157	 && GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE))
2158    {
2159      int eh_regions = 0;
2160      int num_insns = 0;
2161      rtx last_test_insn = NULL_RTX;
2162
2163      /* Scan insns from the top of the loop looking for a qualified
2164	 conditional exit.  */
2165      for (insn = NEXT_INSN (loop_stack->data.loop.start_label); insn;
2166	   insn = NEXT_INSN (insn))
2167	{
2168	  if (GET_CODE (insn) == NOTE)
2169	    {
2170	      if (optimize < 2
2171		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2172		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2173		/* The code that actually moves the exit test will
2174		   carefully leave BLOCK notes in their original
2175		   location.  That means, however, that we can't debug
2176		   the exit test itself.  So, we refuse to move code
2177		   containing BLOCK notes at low optimization levels.  */
2178		break;
2179
2180	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_BEG)
2181		++eh_regions;
2182	      else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EH_REGION_END)
2183		{
2184		  --eh_regions;
2185		  if (eh_regions < 0)
2186		    /* We've come to the end of an EH region, but
2187		       never saw the beginning of that region.  That
2188		       means that an EH region begins before the top
2189		       of the loop, and ends in the middle of it.  The
2190		       existence of such a situation violates a basic
2191		       assumption in this code, since that would imply
2192		       that even when EH_REGIONS is zero, we might
2193		       move code out of an exception region.  */
2194		    abort ();
2195		}
2196
2197	      /* We must not walk into a nested loop.  */
2198	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
2199		break;
2200
2201	      /* We already know this INSN is a NOTE, so there's no
2202		 point in looking at it to see if it's a JUMP.  */
2203	      continue;
2204	    }
2205
2206	  if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == INSN)
2207	    num_insns++;
2208
2209	  if (last_test_insn && num_insns > 30)
2210	    break;
2211
2212	  if (eh_regions > 0)
2213	    /* We don't want to move a partial EH region.  Consider:
2214
2215		  while ( ( { try {
2216				if (cond ()) 0;
2217				else {
2218				  bar();
2219				  1;
2220				}
2221			      } catch (...) {
2222				1;
2223			      } )) {
2224		     body;
2225		  }
2226
2227	        This isn't legal C++, but here's what it's supposed to
2228	        mean: if cond() is true, stop looping.  Otherwise,
2229	        call bar, and keep looping.  In addition, if cond
2230	        throws an exception, catch it and keep looping.  Such
2231	        constructs are certainly legal in LISP.
2232
2233		We should not move the `if (cond()) 0' test since then
2234		the EH-region for the try-block would be broken up.
2235		(In this case we would move the EH_BEG note for the `try'
2236		and `if cond()' but not the call to bar() or the
2237		EH_END note.)
2238
2239	        So we don't look for tests within an EH region.  */
2240	    continue;
2241
2242	  if (GET_CODE (insn) == JUMP_INSN
2243	      && GET_CODE (PATTERN (insn)) == SET
2244	      && SET_DEST (PATTERN (insn)) == pc_rtx)
2245	    {
2246	      /* This is indeed a jump.  */
2247	      rtx dest1 = NULL_RTX;
2248	      rtx dest2 = NULL_RTX;
2249	      rtx potential_last_test;
2250	      if (GET_CODE (SET_SRC (PATTERN (insn))) == IF_THEN_ELSE)
2251		{
2252		  /* A conditional jump.  */
2253		  dest1 = XEXP (SET_SRC (PATTERN (insn)), 1);
2254		  dest2 = XEXP (SET_SRC (PATTERN (insn)), 2);
2255		  potential_last_test = insn;
2256		}
2257	      else
2258		{
2259		  /* An unconditional jump.  */
2260		  dest1 = SET_SRC (PATTERN (insn));
2261		  /* Include the BARRIER after the JUMP.  */
2262		  potential_last_test = NEXT_INSN (insn);
2263		}
2264
2265	      do {
2266		if (dest1 && GET_CODE (dest1) == LABEL_REF
2267		    && ((XEXP (dest1, 0)
2268			 == loop_stack->data.loop.alt_end_label)
2269			|| (XEXP (dest1, 0)
2270			    == loop_stack->data.loop.end_label)))
2271		  {
2272		    last_test_insn = potential_last_test;
2273		    break;
2274		  }
2275
2276		/* If this was a conditional jump, there may be
2277		   another label at which we should look.  */
2278		dest1 = dest2;
2279		dest2 = NULL_RTX;
2280	      } while (dest1);
2281	    }
2282	}
2283
2284      if (last_test_insn != 0 && last_test_insn != get_last_insn ())
2285	{
2286	  /* We found one.  Move everything from there up
2287	     to the end of the loop, and add a jump into the loop
2288	     to reach it.  */
2289	  register rtx newstart_label = gen_label_rtx ();
2290	  register rtx start_move = start_label;
2291	  rtx next_insn;
2292
2293	  /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2294	     then we want to move this note also.  */
2295	  if (GET_CODE (PREV_INSN (start_move)) == NOTE
2296	      && (NOTE_LINE_NUMBER (PREV_INSN (start_move))
2297		  == NOTE_INSN_LOOP_CONT))
2298	    start_move = PREV_INSN (start_move);
2299
2300	  emit_label_after (newstart_label, PREV_INSN (start_move));
2301
2302	  /* Actually move the insns.  Start at the beginning, and
2303	     keep moving insns until we've moved the
2304	     last_test_insn.  */
2305	  for (insn = start_move; insn; insn = next_insn)
2306	    {
2307	      /* Figure out which insn comes after this one.  We have
2308		 to do this before we move INSN.  */
2309	      if (insn == last_test_insn)
2310		/* We've moved all the insns.  */
2311		next_insn = NULL_RTX;
2312	      else
2313		next_insn = NEXT_INSN (insn);
2314
2315	      if (GET_CODE (insn) == NOTE
2316		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2317		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2318		/* We don't want to move NOTE_INSN_BLOCK_BEGs or
2319		   NOTE_INSN_BLOCK_ENDs because the correct generation
2320		   of debugging information depends on these appearing
2321		   in the same order in the RTL and in the tree
2322		   structure, where they are represented as BLOCKs.
2323		   So, we don't move block notes.  Of course, moving
2324		   the code inside the block is likely to make it
2325		   impossible to debug the instructions in the exit
2326		   test, but such is the price of optimization.  */
2327		continue;
2328
2329	      /* Move the INSN.  */
2330	      reorder_insns (insn, insn, get_last_insn ());
2331	    }
2332
2333	  emit_jump_insn_after (gen_jump (start_label),
2334				PREV_INSN (newstart_label));
2335	  emit_barrier_after (PREV_INSN (newstart_label));
2336	  start_label = newstart_label;
2337	}
2338    }
2339
2340  if (needs_end_jump)
2341    {
2342      emit_jump (start_label);
2343      emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
2344    }
2345  emit_label (loop_stack->data.loop.end_label);
2346
2347  POPSTACK (loop_stack);
2348
2349  last_expr_type = 0;
2350}
2351
2352/* Generate a jump to the current loop's continue-point.
2353   This is usually the top of the loop, but may be specified
2354   explicitly elsewhere.  If not currently inside a loop,
2355   return 0 and do nothing; caller will print an error message.  */
2356
2357int
2358expand_continue_loop (whichloop)
2359     struct nesting *whichloop;
2360{
2361  last_expr_type = 0;
2362  if (whichloop == 0)
2363    whichloop = loop_stack;
2364  if (whichloop == 0)
2365    return 0;
2366  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2367			NULL_RTX);
2368  return 1;
2369}
2370
2371/* Generate a jump to exit the current loop.  If not currently inside a loop,
2372   return 0 and do nothing; caller will print an error message.  */
2373
2374int
2375expand_exit_loop (whichloop)
2376     struct nesting *whichloop;
2377{
2378  last_expr_type = 0;
2379  if (whichloop == 0)
2380    whichloop = loop_stack;
2381  if (whichloop == 0)
2382    return 0;
2383  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2384  return 1;
2385}
2386
2387/* Generate a conditional jump to exit the current loop if COND
2388   evaluates to zero.  If not currently inside a loop,
2389   return 0 and do nothing; caller will print an error message.  */
2390
2391int
2392expand_exit_loop_if_false (whichloop, cond)
2393     struct nesting *whichloop;
2394     tree cond;
2395{
2396  rtx label = gen_label_rtx ();
2397  rtx last_insn;
2398  last_expr_type = 0;
2399
2400  if (whichloop == 0)
2401    whichloop = loop_stack;
2402  if (whichloop == 0)
2403    return 0;
2404  /* In order to handle fixups, we actually create a conditional jump
2405     around an unconditional branch to exit the loop.  If fixups are
2406     necessary, they go before the unconditional branch.  */
2407
2408
2409  do_jump (cond, NULL_RTX, label);
2410  last_insn = get_last_insn ();
2411  if (GET_CODE (last_insn) == CODE_LABEL)
2412    whichloop->data.loop.alt_end_label = last_insn;
2413  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2414			NULL_RTX);
2415  emit_label (label);
2416
2417  return 1;
2418}
2419
2420/* Return nonzero if the loop nest is empty.  Else return zero.  */
2421
2422int
2423stmt_loop_nest_empty ()
2424{
2425  return (loop_stack == NULL);
2426}
2427
2428/* Return non-zero if we should preserve sub-expressions as separate
2429   pseudos.  We never do so if we aren't optimizing.  We always do so
2430   if -fexpensive-optimizations.
2431
2432   Otherwise, we only do so if we are in the "early" part of a loop.  I.e.,
2433   the loop may still be a small one.  */
2434
2435int
2436preserve_subexpressions_p ()
2437{
2438  rtx insn;
2439
2440  if (flag_expensive_optimizations)
2441    return 1;
2442
2443  if (optimize == 0 || loop_stack == 0)
2444    return 0;
2445
2446  insn = get_last_insn_anywhere ();
2447
2448  return (insn
2449	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2450	      < n_non_fixed_regs * 3));
2451
2452}
2453
2454/* Generate a jump to exit the current loop, conditional, binding contour
2455   or case statement.  Not all such constructs are visible to this function,
2456   only those started with EXIT_FLAG nonzero.  Individual languages use
2457   the EXIT_FLAG parameter to control which kinds of constructs you can
2458   exit this way.
2459
2460   If not currently inside anything that can be exited,
2461   return 0 and do nothing; caller will print an error message.  */
2462
2463int
2464expand_exit_something ()
2465{
2466  struct nesting *n;
2467  last_expr_type = 0;
2468  for (n = nesting_stack; n; n = n->all)
2469    if (n->exit_label != 0)
2470      {
2471	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2472	return 1;
2473      }
2474
2475  return 0;
2476}
2477
2478/* Generate RTL to return from the current function, with no value.
2479   (That is, we do not do anything about returning any value.)  */
2480
2481void
2482expand_null_return ()
2483{
2484  struct nesting *block = block_stack;
2485  rtx last_insn = 0;
2486
2487  /* Does any pending block have cleanups?  */
2488
2489  while (block && block->data.block.cleanups == 0)
2490    block = block->next;
2491
2492  /* If yes, use a goto to return, since that runs cleanups.  */
2493
2494  expand_null_return_1 (last_insn, block != 0);
2495}
2496
2497/* Generate RTL to return from the current function, with value VAL.  */
2498
2499static void
2500expand_value_return (val)
2501     rtx val;
2502{
2503  struct nesting *block = block_stack;
2504  rtx last_insn = get_last_insn ();
2505  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2506
2507  /* Copy the value to the return location
2508     unless it's already there.  */
2509
2510  if (return_reg != val)
2511    {
2512#ifdef PROMOTE_FUNCTION_RETURN
2513      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2514      int unsignedp = TREE_UNSIGNED (type);
2515      enum machine_mode old_mode
2516	= DECL_MODE (DECL_RESULT (current_function_decl));
2517      enum machine_mode mode
2518	= promote_mode (type, old_mode, &unsignedp, 1);
2519
2520      if (mode != old_mode)
2521	val = convert_modes (mode, old_mode, val, unsignedp);
2522#endif
2523      emit_move_insn (return_reg, val);
2524    }
2525  if (GET_CODE (return_reg) == REG
2526      && REGNO (return_reg) < FIRST_PSEUDO_REGISTER)
2527    emit_insn (gen_rtx_USE (VOIDmode, return_reg));
2528  /* Handle calls that return values in multiple non-contiguous locations.
2529     The Irix 6 ABI has examples of this.  */
2530  else if (GET_CODE (return_reg) == PARALLEL)
2531    {
2532      int i;
2533
2534      for (i = 0; i < XVECLEN (return_reg, 0); i++)
2535	{
2536	  rtx x = XEXP (XVECEXP (return_reg, 0, i), 0);
2537
2538	  if (GET_CODE (x) == REG
2539	      && REGNO (x) < FIRST_PSEUDO_REGISTER)
2540	    emit_insn (gen_rtx_USE (VOIDmode, x));
2541	}
2542    }
2543
2544  /* Does any pending block have cleanups?  */
2545
2546  while (block && block->data.block.cleanups == 0)
2547    block = block->next;
2548
2549  /* If yes, use a goto to return, since that runs cleanups.
2550     Use LAST_INSN to put cleanups *before* the move insn emitted above.  */
2551
2552  expand_null_return_1 (last_insn, block != 0);
2553}
2554
2555/* Output a return with no value.  If LAST_INSN is nonzero,
2556   pretend that the return takes place after LAST_INSN.
2557   If USE_GOTO is nonzero then don't use a return instruction;
2558   go to the return label instead.  This causes any cleanups
2559   of pending blocks to be executed normally.  */
2560
2561static void
2562expand_null_return_1 (last_insn, use_goto)
2563     rtx last_insn;
2564     int use_goto;
2565{
2566  rtx end_label = cleanup_label ? cleanup_label : return_label;
2567
2568  clear_pending_stack_adjust ();
2569  do_pending_stack_adjust ();
2570  last_expr_type = 0;
2571
2572  /* PCC-struct return always uses an epilogue.  */
2573  if (current_function_returns_pcc_struct || use_goto)
2574    {
2575      if (end_label == 0)
2576	end_label = return_label = gen_label_rtx ();
2577      expand_goto_internal (NULL_TREE, end_label, last_insn);
2578      return;
2579    }
2580
2581  /* Otherwise output a simple return-insn if one is available,
2582     unless it won't do the job.  */
2583#ifdef HAVE_return
2584  if (HAVE_return && use_goto == 0 && cleanup_label == 0)
2585    {
2586      emit_jump_insn (gen_return ());
2587      emit_barrier ();
2588      return;
2589    }
2590#endif
2591
2592  /* Otherwise jump to the epilogue.  */
2593  expand_goto_internal (NULL_TREE, end_label, last_insn);
2594}
2595
2596/* Generate RTL to evaluate the expression RETVAL and return it
2597   from the current function.  */
2598
2599void
2600expand_return (retval)
2601     tree retval;
2602{
2603  /* If there are any cleanups to be performed, then they will
2604     be inserted following LAST_INSN.  It is desirable
2605     that the last_insn, for such purposes, should be the
2606     last insn before computing the return value.  Otherwise, cleanups
2607     which call functions can clobber the return value.  */
2608  /* ??? rms: I think that is erroneous, because in C++ it would
2609     run destructors on variables that might be used in the subsequent
2610     computation of the return value.  */
2611  rtx last_insn = 0;
2612  register rtx val = 0;
2613  register rtx op0;
2614  tree retval_rhs;
2615  int cleanups;
2616
2617  /* If function wants no value, give it none.  */
2618  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
2619    {
2620      expand_expr (retval, NULL_RTX, VOIDmode, 0);
2621      emit_queue ();
2622      expand_null_return ();
2623      return;
2624    }
2625
2626  /* Are any cleanups needed?  E.g. C++ destructors to be run?  */
2627  /* This is not sufficient.  We also need to watch for cleanups of the
2628     expression we are about to expand.  Unfortunately, we cannot know
2629     if it has cleanups until we expand it, and we want to change how we
2630     expand it depending upon if we need cleanups.  We can't win.  */
2631#if 0
2632  cleanups = any_pending_cleanups (1);
2633#else
2634  cleanups = 1;
2635#endif
2636
2637  if (TREE_CODE (retval) == RESULT_DECL)
2638    retval_rhs = retval;
2639  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
2640	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
2641    retval_rhs = TREE_OPERAND (retval, 1);
2642  else if (TREE_TYPE (retval) == void_type_node)
2643    /* Recognize tail-recursive call to void function.  */
2644    retval_rhs = retval;
2645  else
2646    retval_rhs = NULL_TREE;
2647
2648  /* Only use `last_insn' if there are cleanups which must be run.  */
2649  if (cleanups || cleanup_label != 0)
2650    last_insn = get_last_insn ();
2651
2652  /* Distribute return down conditional expr if either of the sides
2653     may involve tail recursion (see test below).  This enhances the number
2654     of tail recursions we see.  Don't do this always since it can produce
2655     sub-optimal code in some cases and we distribute assignments into
2656     conditional expressions when it would help.  */
2657
2658  if (optimize && retval_rhs != 0
2659      && frame_offset == 0
2660      && TREE_CODE (retval_rhs) == COND_EXPR
2661      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
2662	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
2663    {
2664      rtx label = gen_label_rtx ();
2665      tree expr;
2666
2667      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
2668      start_cleanup_deferral ();
2669      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2670		    DECL_RESULT (current_function_decl),
2671		    TREE_OPERAND (retval_rhs, 1));
2672      TREE_SIDE_EFFECTS (expr) = 1;
2673      expand_return (expr);
2674      emit_label (label);
2675
2676      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
2677		    DECL_RESULT (current_function_decl),
2678		    TREE_OPERAND (retval_rhs, 2));
2679      TREE_SIDE_EFFECTS (expr) = 1;
2680      expand_return (expr);
2681      end_cleanup_deferral ();
2682      return;
2683    }
2684
2685  /* Attempt to optimize the call if it is tail recursive.  */
2686  if (optimize_tail_recursion (retval_rhs, last_insn))
2687    return;
2688
2689#ifdef HAVE_return
2690  /* This optimization is safe if there are local cleanups
2691     because expand_null_return takes care of them.
2692     ??? I think it should also be safe when there is a cleanup label,
2693     because expand_null_return takes care of them, too.
2694     Any reason why not?  */
2695  if (HAVE_return && cleanup_label == 0
2696      && ! current_function_returns_pcc_struct
2697      && BRANCH_COST <= 1)
2698    {
2699      /* If this is  return x == y;  then generate
2700	 if (x == y) return 1; else return 0;
2701	 if we can do it with explicit return insns and branches are cheap,
2702	 but not if we have the corresponding scc insn.  */
2703      int has_scc = 0;
2704      if (retval_rhs)
2705	switch (TREE_CODE (retval_rhs))
2706	  {
2707	  case EQ_EXPR:
2708#ifdef HAVE_seq
2709	    has_scc = HAVE_seq;
2710#endif
2711	  case NE_EXPR:
2712#ifdef HAVE_sne
2713	    has_scc = HAVE_sne;
2714#endif
2715	  case GT_EXPR:
2716#ifdef HAVE_sgt
2717	    has_scc = HAVE_sgt;
2718#endif
2719	  case GE_EXPR:
2720#ifdef HAVE_sge
2721	    has_scc = HAVE_sge;
2722#endif
2723	  case LT_EXPR:
2724#ifdef HAVE_slt
2725	    has_scc = HAVE_slt;
2726#endif
2727	  case LE_EXPR:
2728#ifdef HAVE_sle
2729	    has_scc = HAVE_sle;
2730#endif
2731	  case TRUTH_ANDIF_EXPR:
2732	  case TRUTH_ORIF_EXPR:
2733	  case TRUTH_AND_EXPR:
2734	  case TRUTH_OR_EXPR:
2735	  case TRUTH_NOT_EXPR:
2736	  case TRUTH_XOR_EXPR:
2737	    if (! has_scc)
2738	      {
2739		op0 = gen_label_rtx ();
2740		jumpifnot (retval_rhs, op0);
2741		expand_value_return (const1_rtx);
2742		emit_label (op0);
2743		expand_value_return (const0_rtx);
2744		return;
2745	      }
2746	    break;
2747
2748	  default:
2749	    break;
2750	  }
2751    }
2752#endif /* HAVE_return */
2753
2754  /* If the result is an aggregate that is being returned in one (or more)
2755     registers, load the registers here.  The compiler currently can't handle
2756     copying a BLKmode value into registers.  We could put this code in a
2757     more general area (for use by everyone instead of just function
2758     call/return), but until this feature is generally usable it is kept here
2759     (and in expand_call).  The value must go into a pseudo in case there
2760     are cleanups that will clobber the real return register.  */
2761
2762  if (retval_rhs != 0
2763      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
2764      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2765    {
2766      int i, bitpos, xbitpos;
2767      int big_endian_correction = 0;
2768      int bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2769      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2770      int bitsize = MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)),
2771			 (unsigned int)BITS_PER_WORD);
2772      rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
2773      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
2774      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
2775      enum machine_mode tmpmode, result_reg_mode;
2776
2777      /* Structures whose size is not a multiple of a word are aligned
2778	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
2779	 machine, this means we must skip the empty high order bytes when
2780	 calculating the bit offset.  */
2781      if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2782	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2783						  * BITS_PER_UNIT));
2784
2785      /* Copy the structure BITSIZE bits at a time.  */
2786      for (bitpos = 0, xbitpos = big_endian_correction;
2787	   bitpos < bytes * BITS_PER_UNIT;
2788	   bitpos += bitsize, xbitpos += bitsize)
2789	{
2790	  /* We need a new destination pseudo each time xbitpos is
2791	     on a word boundary and when xbitpos == big_endian_correction
2792	     (the first time through).  */
2793	  if (xbitpos % BITS_PER_WORD == 0
2794	      || xbitpos == big_endian_correction)
2795	    {
2796	      /* Generate an appropriate register.  */
2797	      dst = gen_reg_rtx (word_mode);
2798	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;
2799
2800	      /* Clobber the destination before we move anything into it.  */
2801	      emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
2802	    }
2803
2804	  /* We need a new source operand each time bitpos is on a word
2805	     boundary.  */
2806	  if (bitpos % BITS_PER_WORD == 0)
2807	    src = operand_subword_force (result_val,
2808					 bitpos / BITS_PER_WORD,
2809					 BLKmode);
2810
2811	  /* Use bitpos for the source extraction (left justified) and
2812	     xbitpos for the destination store (right justified).  */
2813	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2814			   extract_bit_field (src, bitsize,
2815					      bitpos % BITS_PER_WORD, 1,
2816					      NULL_RTX, word_mode,
2817					      word_mode,
2818					      bitsize / BITS_PER_UNIT,
2819					      BITS_PER_WORD),
2820			   bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2821	}
2822
2823      /* Find the smallest integer mode large enough to hold the
2824	 entire structure and use that mode instead of BLKmode
2825	 on the USE insn for the return register.   */
2826      bytes = int_size_in_bytes (TREE_TYPE (retval_rhs));
2827      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2828	   tmpmode != MAX_MACHINE_MODE;
2829	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
2830	{
2831	  /* Have we found a large enough mode?  */
2832	  if (GET_MODE_SIZE (tmpmode) >= bytes)
2833	    break;
2834	}
2835
2836      /* No suitable mode found.  */
2837      if (tmpmode == MAX_MACHINE_MODE)
2838	abort ();
2839
2840      PUT_MODE (DECL_RTL (DECL_RESULT (current_function_decl)), tmpmode);
2841
2842      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
2843	result_reg_mode = word_mode;
2844      else
2845	result_reg_mode = tmpmode;
2846      result_reg = gen_reg_rtx (result_reg_mode);
2847
2848      emit_queue ();
2849      for (i = 0; i < n_regs; i++)
2850	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
2851			result_pseudos[i]);
2852
2853      if (tmpmode != result_reg_mode)
2854	result_reg = gen_lowpart (tmpmode, result_reg);
2855
2856      expand_value_return (result_reg);
2857    }
2858  else if (cleanups
2859      && retval_rhs != 0
2860      && TREE_TYPE (retval_rhs) != void_type_node
2861      && GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
2862    {
2863      /* Calculate the return value into a pseudo reg.  */
2864      val = gen_reg_rtx (DECL_MODE (DECL_RESULT (current_function_decl)));
2865      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
2866      val = force_not_mem (val);
2867      emit_queue ();
2868      /* Return the calculated value, doing cleanups first.  */
2869      expand_value_return (val);
2870    }
2871  else
2872    {
2873      /* No cleanups or no hard reg used;
2874	 calculate value into hard return reg.  */
2875      expand_expr (retval, const0_rtx, VOIDmode, 0);
2876      emit_queue ();
2877      expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
2878    }
2879}
2880
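/* As an informal example of the COND_EXPR distribution above: with
   optimization on and an empty frame,

       return p ? f (x) : g (x);

   is expanded as if it were

       if (p) return f (x); else return g (x);

   so that each arm can be recognized on its own as a potential
   tail-recursive call by optimize_tail_recursion.  */
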
2881/* Return 1 if the end of the generated RTX is not a barrier.
2882   This means code already compiled can drop through.  */
2883
2884int
2885drop_through_at_end_p ()
2886{
2887  rtx insn = get_last_insn ();
2888  while (insn && GET_CODE (insn) == NOTE)
2889    insn = PREV_INSN (insn);
2890  return insn && GET_CODE (insn) != BARRIER;
2891}
2892
2893/* Test CALL_EXPR to determine if it is a potential tail recursion call
2894   and emit code to optimize the tail recursion.  LAST_INSN indicates where
2895   to place the jump to the tail recursion label.  Return TRUE if the
2896   call was optimized into a goto.
2897
2898   This is only used by expand_return, but expand_call is expected to
2899   use it soon.  */
2900
2901int
2902optimize_tail_recursion (call_expr, last_insn)
2903     tree call_expr;
2904     rtx last_insn;
2905{
2906  /* For tail-recursive call to current function,
2907     just jump back to the beginning.
2908     It's unsafe if any auto variable in this function
2909     has its address taken; for simplicity,
2910     require stack frame to be empty.  */
2911  if (optimize && call_expr != 0
2912      && frame_offset == 0
2913      && TREE_CODE (call_expr) == CALL_EXPR
2914      && TREE_CODE (TREE_OPERAND (call_expr, 0)) == ADDR_EXPR
2915      && TREE_OPERAND (TREE_OPERAND (call_expr, 0), 0) == current_function_decl
2916      /* Finish checking validity, and if valid emit code
2917	 to set the argument variables for the new call.  */
2918      && tail_recursion_args (TREE_OPERAND (call_expr, 1),
2919			      DECL_ARGUMENTS (current_function_decl)))
2920    {
2921      if (tail_recursion_label == 0)
2922	{
2923	  tail_recursion_label = gen_label_rtx ();
2924	  emit_label_after (tail_recursion_label,
2925			    tail_recursion_reentry);
2926	}
2927      emit_queue ();
2928      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
2929      emit_barrier ();
2930      return 1;
2931    }
2932
2933  return 0;
2934}
2935
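/* A sketch of the effect: when expanding the body of

       int gcd (int a, int b) { return b ? gcd (b, a % b) : a; }

   the recursive call passes the tests above (a call to the current
   function, empty frame, compatible scalar arguments), so instead of
   emitting a real call we store the new argument values into the
   formals and jump back to tail_recursion_label at the top of the
   function.  */
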
2936/* Emit code to alter this function's formal parms for a tail-recursive call.
2937   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
2938   FORMALS is the chain of decls of formals.
2939   Return 1 if this can be done;
2940   otherwise return 0 and do not emit any code.  */
2941
2942static int
2943tail_recursion_args (actuals, formals)
2944     tree actuals, formals;
2945{
2946  register tree a = actuals, f = formals;
2947  register int i;
2948  register rtx *argvec;
2949
2950  /* Check that number and types of actuals are compatible
2951     with the formals.  This is not always true in valid C code.
2952     Also check that no formal needs to be addressable
2953     and that all formals are scalars.  */
2954
2955  /* Also count the args.  */
2956
2957  for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
2958    {
2959      if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
2960	  != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
2961	return 0;
2962      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
2963	return 0;
2964    }
2965  if (a != 0 || f != 0)
2966    return 0;
2967
2968  /* Compute all the actuals.  */
2969
2970  argvec = (rtx *) alloca (i * sizeof (rtx));
2971
2972  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2973    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
2974
2975  /* Find which actual values refer to current values of previous formals.
2976     Copy each of them now, before any formal is changed.  */
2977
2978  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
2979    {
2980      int copy = 0;
2981      register int j;
2982      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
2983	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
2984	  { copy = 1; break; }
2985      if (copy)
2986	argvec[i] = copy_to_reg (argvec[i]);
2987    }
2988
2989  /* Store the values of the actuals into the formals.  */
2990
2991  for (f = formals, a = actuals, i = 0; f;
2992       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
2993    {
2994      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
2995	emit_move_insn (DECL_RTL (f), argvec[i]);
2996      else
2997	convert_move (DECL_RTL (f), argvec[i],
2998		      TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
2999    }
3000
3001  free_temp_slots ();
3002  return 1;
3003}
3004
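/* The copying pass above matters for swaps like (informally)

       return f (b, a);     inside the body of     f (a, b)

   Storing into `a' first would clobber the value that the second
   actual still needs, so any actual that mentions an
   earlier-assigned formal is copied to a fresh pseudo before the
   stores into the formals begin.  */
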
3005/* Generate the RTL code for entering a binding contour.
3006   The variables are declared one by one, by calls to `expand_decl'.
3007
3008   EXIT_FLAG is nonzero if this construct should be visible to
3009   `exit_something'.  */
3010
3011void
3012expand_start_bindings (exit_flag)
3013     int exit_flag;
3014{
3015  struct nesting *thisblock = ALLOC_NESTING ();
3016  rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
3017
3018  /* Make an entry on block_stack for the block we are entering.  */
3019
3020  thisblock->next = block_stack;
3021  thisblock->all = nesting_stack;
3022  thisblock->depth = ++nesting_depth;
3023  thisblock->data.block.stack_level = 0;
3024  thisblock->data.block.cleanups = 0;
3025  thisblock->data.block.function_call_count = 0;
3026  thisblock->data.block.exception_region = 0;
3027  thisblock->data.block.target_temp_slot_level = target_temp_slot_level;
3028
3029  thisblock->data.block.conditional_code = 0;
3030  thisblock->data.block.last_unconditional_cleanup = note;
3031  thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3032
3033  if (block_stack
3034      && !(block_stack->data.block.cleanups == NULL_TREE
3035	   && block_stack->data.block.outer_cleanups == NULL_TREE))
3036    thisblock->data.block.outer_cleanups
3037      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3038		   block_stack->data.block.outer_cleanups);
3039  else
3040    thisblock->data.block.outer_cleanups = 0;
3041  thisblock->data.block.label_chain = 0;
3042  thisblock->data.block.innermost_stack_block = stack_block_stack;
3043  thisblock->data.block.first_insn = note;
3044  thisblock->data.block.block_start_count = ++block_start_count;
3045  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3046  block_stack = thisblock;
3047  nesting_stack = thisblock;
3048
3049  /* Make a new level for allocating stack slots.  */
3050  push_temp_slots ();
3051}
3052
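/* As an informal sketch, a C compound statement with declarations

       { int v = init; body; }

   is expanded along these lines:

       expand_start_bindings (0);
       expand_decl (v);
       ... expand the initializer and body ...
       expand_end_bindings (decls, 1, 0);

   with expand_end_bindings (defined below) emitting the matching
   NOTE_INSN_BLOCK_END and running any queued cleanups.  */
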
3053/* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
3054   to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3055   expand_expr are made.  After we end the region, we know that all
3056   space for all temporaries that were created by TARGET_EXPRs will be
3057   destroyed and their space freed for reuse.  */
3058
3059void
3060expand_start_target_temps ()
3061{
3062  /* This is so that even if the result is preserved, the space
3063     allocated will be freed, as we know that it is no longer in use.  */
3064  push_temp_slots ();
3065
3066  /* Start a new binding layer that will keep track of all cleanup
3067     actions to be performed.  */
3068  expand_start_bindings (0);
3069
3070  target_temp_slot_level = temp_slot_level;
3071}
3072
3073void
3074expand_end_target_temps ()
3075{
3076  expand_end_bindings (NULL_TREE, 0, 0);
3077
3078  /* This is so that even if the result is preserved, the space
3079     allocated will be freed, as we know that it is no longer in use.  */
3080  pop_temp_slots ();
3081}
3082
3083/* Mark top block of block_stack as an implicit binding for an
3084   exception region.  This is used to prevent infinite recursion when
3085   ending a binding with expand_end_bindings.  It is only ever called
3086   by expand_eh_region_start, as that is the only way to create a
3087   block stack for an exception region.  */
3088
3089void
3090mark_block_as_eh_region ()
3091{
3092  block_stack->data.block.exception_region = 1;
3093  if (block_stack->next
3094      && block_stack->next->data.block.conditional_code)
3095    {
3096      block_stack->data.block.conditional_code
3097	= block_stack->next->data.block.conditional_code;
3098      block_stack->data.block.last_unconditional_cleanup
3099	= block_stack->next->data.block.last_unconditional_cleanup;
3100      block_stack->data.block.cleanup_ptr
3101	= block_stack->next->data.block.cleanup_ptr;
3102    }
3103}
3104
3105/* True if we are currently emitting insns in an area of output code
3106   that is controlled by a conditional expression.  This is used by
3107   the cleanup handling code to generate conditional cleanup actions.  */
3108
3109int
3110conditional_context ()
3111{
3112  return block_stack && block_stack->data.block.conditional_code;
3113}
3114
3115/* Mark top block of block_stack as not for an implicit binding for an
3116   exception region.  This is only ever done by expand_eh_region_end
3117   to let expand_end_bindings know that it is being called explicitly
3118   to end just the binding layer associated with the exception
3119   region; otherwise expand_end_bindings would try to end all
3120   implicit binding layers for exception regions, and then
3121   one normal binding layer.  */
3122
3123void
3124mark_block_as_not_eh_region ()
3125{
3126  block_stack->data.block.exception_region = 0;
3127}
3128
3129/* True if the top block of block_stack was marked as for an exception
3130   region by mark_block_as_eh_region.  */
3131
3132int
3133is_eh_region ()
3134{
3135  return block_stack && block_stack->data.block.exception_region;
3136}
3137
3138/* Given a pointer to a BLOCK node, save a pointer to the most recently
3139   generated NOTE_INSN_BLOCK_END in the BLOCK_END_NOTE field of the given
3140   BLOCK node.  */
3141
3142void
3143remember_end_note (block)
3144     register tree block;
3145{
3146  BLOCK_END_NOTE (block) = last_block_end_note;
3147  last_block_end_note = NULL_RTX;
3148}
3149
3150/* Emit a handler label for a nonlocal goto handler.
3151   Also emit code to store the handler label in SLOT before BEFORE_INSN.  */
3152
3153static rtx
3154expand_nl_handler_label (slot, before_insn)
3155     rtx slot, before_insn;
3156{
3157  rtx insns;
3158  rtx handler_label = gen_label_rtx ();
3159
3160  /* Don't let jump_optimize delete the handler.  */
3161  LABEL_PRESERVE_P (handler_label) = 1;
3162
3163  start_sequence ();
3164  emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3165  insns = get_insns ();
3166  end_sequence ();
3167  emit_insns_before (insns, before_insn);
3168
3169  emit_label (handler_label);
3170
3171  return handler_label;
3172}
3173
3174/* Emit code to restore vital registers at the beginning of a nonlocal goto
3175   handler.  */
3176static void
3177expand_nl_goto_receiver ()
3178{
3179#ifdef HAVE_nonlocal_goto
3180  if (! HAVE_nonlocal_goto)
3181#endif
3182    /* First adjust our frame pointer to its actual value.  It was
3183       previously set to the start of the virtual area corresponding to
3184       the stacked variables when we branched here and now needs to be
3185       adjusted to the actual hardware fp value.
3186
3187       Assignments to virtual registers are converted by
3188       instantiate_virtual_regs into the corresponding assignment
3189       to the underlying register (fp in this case) that makes
3190       the original assignment true.
3191       So the following insn will actually be
3192       decrementing fp by STARTING_FRAME_OFFSET.  */
3193    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3194
3195#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3196  if (fixed_regs[ARG_POINTER_REGNUM])
3197    {
3198#ifdef ELIMINABLE_REGS
3199      /* If the argument pointer can be eliminated in favor of the
3200	 frame pointer, we don't need to restore it.  We assume here
3201	 that if such an elimination is present, it can always be used.
3202	 This is the case on all known machines; if we don't make this
3203	 assumption, we do unnecessary saving on many machines.  */
3204      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
3205      size_t i;
3206
3207      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
3208	if (elim_regs[i].from == ARG_POINTER_REGNUM
3209	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3210	  break;
3211
3212      if (i == sizeof elim_regs / sizeof elim_regs [0])
3213#endif
3214	{
3215	  /* Now restore our arg pointer from the address at which it
3216	     was saved in our stack frame.
3217	     If there hasn't been space allocated for it yet, make
3218	     some now.  */
3219	  if (arg_pointer_save_area == 0)
3220	    arg_pointer_save_area
3221	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
3222	  emit_move_insn (virtual_incoming_args_rtx,
3223			  /* We need a pseudo here, or else
3224			     instantiate_virtual_regs_1 complains.  */
3225			  copy_to_reg (arg_pointer_save_area));
3226	}
3227    }
3228#endif
3229
3230#ifdef HAVE_nonlocal_goto_receiver
3231  if (HAVE_nonlocal_goto_receiver)
3232    emit_insn (gen_nonlocal_goto_receiver ());
3233#endif
3234}

/* Make handlers for nonlocal gotos taking place in the function calls in
   block THISBLOCK.  */

static void
expand_nl_goto_receivers (thisblock)
     struct nesting *thisblock;
{
  tree link;
  rtx afterward = gen_label_rtx ();
  rtx insns, slot;
  rtx label_list;
  int any_invalid;

  /* Record the handler address in the stack slot for that purpose,
     during this block, saving and restoring the outer value.  */
  if (thisblock->next != 0)
    for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
      {
	rtx save_receiver = gen_reg_rtx (Pmode);
	emit_move_insn (XEXP (slot, 0), save_receiver);

	start_sequence ();
	emit_move_insn (save_receiver, XEXP (slot, 0));
	insns = get_insns ();
	end_sequence ();
	emit_insns_before (insns, thisblock->data.block.first_insn);
      }

  /* Jump around the handlers; they run only when specially invoked.  */
  emit_jump (afterward);

  /* Make a separate handler for each label.  */
  link = nonlocal_labels;
  slot = nonlocal_goto_handler_slots;
  label_list = NULL_RTX;
  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
    /* Skip any labels we shouldn't be able to jump to from here;
       we generate one special handler for all of them below, which
       just calls abort.  */
    if (! DECL_TOO_LATE (TREE_VALUE (link)))
      {
	rtx lab;
	lab = expand_nl_handler_label (XEXP (slot, 0),
				       thisblock->data.block.first_insn);
	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);

	expand_nl_goto_receiver ();

	/* Jump to the "real" nonlocal label.  */
	expand_goto (TREE_VALUE (link));
      }

  /* A second pass over all nonlocal labels; this time we handle those
     we should not be able to jump to at this point.  */
  link = nonlocal_labels;
  slot = nonlocal_goto_handler_slots;
  any_invalid = 0;
  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
    if (DECL_TOO_LATE (TREE_VALUE (link)))
      {
	rtx lab;
	lab = expand_nl_handler_label (XEXP (slot, 0),
				       thisblock->data.block.first_insn);
	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
	any_invalid = 1;
      }

  if (any_invalid)
    {
      expand_nl_goto_receiver ();
      emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "abort"), 0,
			 VOIDmode, 0);
      emit_barrier ();
    }

  nonlocal_goto_handler_labels = label_list;
  emit_label (afterward);
}
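
/* Illustrative example (added commentary, not from the original
   sources): the receivers built above serve GNU C nested functions
   that jump to a label in their containing function, e.g.

       void outer (void)
       {
         __label__ failed;
         void inner (void) { goto failed; }
         register_callback (inner);        <- hypothetical call
         return;
       failed:
         recover ();                       <- hypothetical handler
       }

   Each receiver gains control after the longjmp-like transfer,
   restores the frame and argument pointers, and then jumps to the
   "real" label (`failed' here).  */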

/* Generate RTL code to terminate a binding contour.

   VARS is the chain of VAR_DECL nodes for the variables bound in this
   contour.  There may actually be other nodes in this chain, but any
   nodes other than VAR_DECLS are ignored.

   MARK_ENDS is nonzero if we should put a note at the beginning
   and end of this binding contour.

   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
   (That is true automatically if the contour has a saved stack level.)  */

void
expand_end_bindings (vars, mark_ends, dont_jump_in)
     tree vars;
     int mark_ends;
     int dont_jump_in;
{
  register struct nesting *thisblock;
  register tree decl;

  while (block_stack->data.block.exception_region)
    {
      /* Because we don't need or want a new temporary level and
	 because we didn't create one in expand_eh_region_start,
	 create a fake one now to avoid removing one in
	 expand_end_bindings.  */
      push_temp_slots ();

      block_stack->data.block.exception_region = 0;

      expand_end_bindings (NULL_TREE, 0, 0);
    }

  /* Since expand_eh_region_start does an expand_start_bindings, we
     have to first end all the bindings that were created by
     expand_eh_region_start.  */

  thisblock = block_stack;

  if (warn_unused)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      if (TREE_CODE (decl) == VAR_DECL
	  && ! TREE_USED (decl)
	  && ! DECL_IN_SYSTEM_HEADER (decl)
	  && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
	warning_with_decl (decl, "unused variable `%s'");

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  /* If necessary, make handlers for nonlocal gotos taking
     place in the function calls in this block.  */
  if (function_call_count != thisblock->data.block.function_call_count
      && nonlocal_labels
      /* Make handler for outermost block
	 if there were any nonlocal gotos to this function.  */
      && (thisblock->next == 0 ? current_function_has_nonlocal_label
	  /* Make handler for inner block if it has something
	     special to do when you jump out of it.  */
	  : (thisblock->data.block.cleanups != 0
	     || thisblock->data.block.stack_level != 0)))
    expand_nl_goto_receivers (thisblock);

  /* Don't allow jumping into a block that has a stack level.
     Cleanups are allowed, though.  */
  if (dont_jump_in
      || thisblock->data.block.stack_level != 0)
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
	 Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
	{
	  DECL_TOO_LATE (chain->label) = 1;
	  /* If any goto without a fixup came to this label,
	     that must be an error, because gotos without fixups
	     come from outside all saved stack-levels.  */
	  if (TREE_ADDRESSABLE (chain->label))
	    error_with_decl (chain->label,
			     "label `%s' used before containing binding contour");
	}
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      /* Only clean up here if this point can actually be reached.  */
      int reachable = GET_CODE (get_last_insn ()) != BARRIER;

      /* Don't let cleanups affect ({...}) constructs.  */
      int old_expr_stmts_for_value = expr_stmts_for_value;
      rtx old_last_expr_value = last_expr_value;
      tree old_last_expr_type = last_expr_type;
      expr_stmts_for_value = 0;

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
      if (reachable)
	do_pending_stack_adjust ();

      expr_stmts_for_value = old_expr_stmts_for_value;
      last_expr_value = old_last_expr_value;
      last_expr_type = old_last_expr_type;

      /* Restore the stack level.  */

      if (reachable && thisblock->data.block.stack_level != 0)
	{
	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			      thisblock->data.block.stack_level, NULL_RTX);
	  if (nonlocal_goto_handler_slots != 0)
	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
			     NULL_RTX);
	}

      /* Any gotos out of this block must also do these things.
	 Also report any gotos with fixups that came to labels in this
	 level.  */
      fixup_gotos (thisblock,
		   thisblock->data.block.stack_level,
		   thisblock->data.block.cleanups,
		   thisblock->data.block.first_insn,
		   dont_jump_in);
    }

  /* Mark the beginning and end of the scope if requested.
     We do this now, after running cleanups on the variables
     just going out of scope, so they are in scope for their cleanups.  */

  if (mark_ends)
    last_block_end_note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
  else
    /* Get rid of the beginning-mark if we don't make an end-mark.  */
    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* If doing stupid register allocation, make sure lives of all
     register variables declared here extend thru end of scope.  */

  if (obey_regdecls)
    for (decl = vars; decl; decl = TREE_CHAIN (decl))
      if (TREE_CODE (decl) == VAR_DECL && DECL_RTL (decl))
	use_variable (DECL_RTL (decl));

  /* Restore the temporary level of TARGET_EXPRs.  */
  target_temp_slot_level = thisblock->data.block.target_temp_slot_level;

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
  pop_temp_slots ();
}
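
/* Illustrative example (added commentary, not from the original
   sources): the DECL_TOO_LATE marking above is what rejects code
   such as

       void f (int n)
       {
         goto inside;               <- rejected
         {
           int vla[n];
         inside:
           use (vla);
         }
       }

   Jumping past the allocation of `vla' would leave the saved stack
   level unrestored, so labels inside a contour with a stack level
   stop being valid goto targets once the contour is ended.  */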

/* Generate RTL for the automatic variable declaration DECL.
   (Other kinds of declarations are simply ignored if seen here.)  */

void
expand_decl (decl)
     register tree decl;
{
  struct nesting *thisblock = block_stack;
  tree type;

  type = TREE_TYPE (decl);

  /* Only automatic variables need any expansion done.
     Static and external variables, and external functions,
     will be handled by `assemble_variable' (called from finish_decl).
     TYPE_DECL and CONST_DECL require nothing.
     PARM_DECLs are handled in `assign_parms'.  */

  if (TREE_CODE (decl) != VAR_DECL)
    return;
  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    DECL_RTL (decl) = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	DECL_RTL (decl) = assign_stack_temp (DECL_MODE (decl), 0, 1);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	DECL_RTL (decl) = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl), AGGREGATE_TYPE_P (type));
    }
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into regs.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_THIS_VOLATILE (decl)
	   && ! TREE_ADDRESSABLE (decl)
	   && (DECL_REGISTER (decl) || ! obey_regdecls)
	   /* If -fcheck-memory-usage, check all variables.  */
	   && ! current_function_check_memory_usage)
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TREE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      DECL_RTL (decl) = gen_reg_rtx (reg_mode);
      mark_user_reg (DECL_RTL (decl));

      if (POINTER_TYPE_P (type))
	mark_reg_pointer (DECL_RTL (decl),
			  (TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl)))
			   / BITS_PER_UNIT));
    }

  else if (TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && (TREE_INT_CST_HIGH (DECL_SIZE (decl)) != 0
		     || (TREE_INT_CST_LOW (DECL_SIZE (decl))
			 > STACK_CHECK_MAX_VAR_SIZE * BITS_PER_UNIT))))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL (decl) != 0)
	{
	  if (GET_CODE (DECL_RTL (decl)) != MEM
	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      DECL_RTL (decl) = assign_temp (TREE_TYPE (decl), 1, 1, 1);
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
			   AGGREGATE_TYPE_P (TREE_TYPE (decl)));

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
			   AGGREGATE_TYPE_P (TREE_TYPE (decl)));
#if 0
      /* If this is in memory because of -ffloat-store,
	 set the volatile bit, to prevent optimizations from
	 undoing the effects.  */
      if (flag_float_store && TREE_CODE (type) == REAL_TYPE)
	MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
#endif

      MEM_ALIAS_SET (DECL_RTL (decl)) = get_alias_set (decl);
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size;

      /* Record the stack pointer on entry to block, if we have
	 not already done so.  */
      if (thisblock->data.block.stack_level == 0)
	{
	  do_pending_stack_adjust ();
	  emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			   &thisblock->data.block.stack_level,
			   thisblock->data.block.first_insn);
	  stack_block_stack = thisblock;
	}

      /* Compute the variable's size, in bytes.  */
      size = expand_expr (size_binop (CEIL_DIV_EXPR,
				      DECL_SIZE (decl),
				      size_int (BITS_PER_UNIT)),
			  NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      DECL_RTL (decl) = gen_rtx_MEM (DECL_MODE (decl), address);

      /* If this is a memory ref that contains aggregate components,
	 mark it as such for cse and loop optimize.  */
      MEM_SET_IN_STRUCT_P (DECL_RTL (decl),
			   AGGREGATE_TYPE_P (TREE_TYPE (decl)));

      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
    }

  if (TREE_THIS_VOLATILE (decl))
    MEM_VOLATILE_P (DECL_RTL (decl)) = 1;
#if 0 /* A variable is not necessarily unchanging
	 just because it is const.  RTX_UNCHANGING_P
	 means no change in the function,
	 not merely no change in the variable's scope.
	 It is correct to set RTX_UNCHANGING_P if the variable's scope
	 is the whole function.  There's no convenient way to test that.  */
  if (TREE_READONLY (decl))
    RTX_UNCHANGING_P (DECL_RTL (decl)) = 1;
#endif

  /* If doing stupid register allocation, make sure life of any
     register variable starts here, at the start of its scope.  */

  if (obey_regdecls)
    use_variable (DECL_RTL (decl));
}
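
/* Illustrative example (added commentary, not from the original
   sources): the three storage strategies above correspond to local
   declarations such as

       int i;            <- non-BLKmode, may live in a pseudo register
       struct s agg;     <- fixed size, stack slot via assign_temp
       char buf[n];      <- variable size, allocate_dynamic_stack_space

   where `n' is a runtime value, making `buf' a variable-length
   array.  */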



/* Emit code to perform the initialization of a declaration DECL.  */

void
expand_decl_init (decl)
     tree decl;
{
  int was_used = TREE_USED (decl);

  /* If this is a CONST_DECL, we don't have to generate any code, but
     if DECL_INITIAL is a constant, call expand_expr to force TREE_CST_RTL
     to be set while in the obstack containing the constant.  If we don't
     do this, we can lose if we have functions nested three deep and the middle
     function makes a CONST_DECL whose DECL_INITIAL is a STRING_CST while
     the innermost function is the first to expand that STRING_CST.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      if (DECL_INITIAL (decl) && TREE_CONSTANT (DECL_INITIAL (decl)))
	expand_expr (DECL_INITIAL (decl), NULL_RTX, VOIDmode,
		     EXPAND_INITIALIZER);
      return;
    }

  if (TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0, 0);
      emit_queue ();
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
      emit_queue ();
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
}
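
/* Illustrative example (added commentary, not from the original
   sources): given an erroneous initializer, e.g.

       int x = undeclared_identifier;

   DECL_INITIAL is error_mark_node, and the code above still stores
   zero into `x' so that later uses operate on initialized storage
   rather than garbage.  */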

/* CLEANUP is an expression to be executed at exit from this binding contour;
   for example, in C++, it might call the destructor for this variable.

   We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
   CLEANUP multiple times, and have the correct semantics.  This
   happens in exception handling, and for gotos, returns, and breaks
   that leave the current scope.

   If CLEANUP is nonzero and DECL is zero, we record a cleanup
   that is not associated with any particular variable.  */

int
expand_decl_cleanup (decl, cleanup)
     tree decl, cleanup;
{
  struct nesting *thisblock = block_stack;

  /* Error if we are not in any block.  */
  if (thisblock == 0)
    return 0;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      tree t;
      rtx seq;
      tree *cleanups = &thisblock->data.block.cleanups;
      int cond_context = conditional_context ();

      if (cond_context)
	{
	  rtx flag = gen_reg_rtx (word_mode);
	  rtx set_flag_0;
	  tree cond;

	  start_sequence ();
	  emit_move_insn (flag, const0_rtx);
	  set_flag_0 = get_insns ();
	  end_sequence ();

	  thisblock->data.block.last_unconditional_cleanup
	    = emit_insns_after (set_flag_0,
				thisblock->data.block.last_unconditional_cleanup);

	  emit_move_insn (flag, const1_rtx);

	  /* All cleanups must be on the function_obstack.  */
	  push_obstacks_nochange ();
	  resume_temporary_allocation ();

	  cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
	  DECL_RTL (cond) = flag;

	  /* Conditionalize the cleanup.  */
	  cleanup = build (COND_EXPR, void_type_node,
			   truthvalue_conversion (cond),
			   cleanup, integer_zero_node);
	  cleanup = fold (cleanup);

	  pop_obstacks ();

	  cleanups = thisblock->data.block.cleanup_ptr;
	}

      /* All cleanups must be on the function_obstack.  */
      push_obstacks_nochange ();
      resume_temporary_allocation ();
      cleanup = unsave_expr (cleanup);
      pop_obstacks ();

      t = *cleanups = temp_tree_cons (decl, cleanup, *cleanups);

      if (! cond_context)
	/* If this block has a cleanup, it belongs in stack_block_stack.  */
	stack_block_stack = thisblock;

      if (cond_context)
	{
	  start_sequence ();
	}

      /* If this was optimized so that there is no exception region for the
	 cleanup, then mark the TREE_LIST node, so that we can later tell
	 if we need to call expand_eh_region_end.  */
      if (! using_eh_for_cleanups_p
	  || expand_eh_region_start_tree (decl, cleanup))
	TREE_ADDRESSABLE (t) = 1;
      /* If that started a new EH region, we're in a new block.  */
      thisblock = block_stack;

      if (cond_context)
	{
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    thisblock->data.block.last_unconditional_cleanup
	      = emit_insns_after (seq,
				  thisblock->data.block.last_unconditional_cleanup);
	}
      else
	{
	  thisblock->data.block.last_unconditional_cleanup
	    = get_last_insn ();
	  thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
	}
    }
  return 1;
}
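
/* Illustrative example (added commentary, not from the original
   sources; assumes a C++ front end): in code such as

       f (cond ? T ("a") : T ("b"));

   only one temporary is constructed, so its destructor cleanup is
   registered from a conditional context.  The flag register set up
   above records at run time which arm actually ran, and the COND_EXPR
   wrapped around the cleanup tests that flag before destroying
   anything.  */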

/* Like expand_decl_cleanup, but suppress generating an exception handler
   to perform the cleanup.  */

int
expand_decl_cleanup_no_eh (decl, cleanup)
     tree decl, cleanup;
{
  int save_eh = using_eh_for_cleanups_p;
  int result;

  using_eh_for_cleanups_p = 0;
  result = expand_decl_cleanup (decl, cleanup);
  using_eh_for_cleanups_p = save_eh;

  return result;
}

/* Arrange for the top element of the dynamic cleanup chain to be
   popped if we exit the current binding contour.  DECL is the
   associated declaration, if any, otherwise NULL_TREE.  If the
   current contour is left via an exception, then __sjthrow will pop
   the top element off the dynamic cleanup chain.  The code that
   avoids doing the action we push into the cleanup chain in the
   exceptional case is contained in expand_cleanups.

   This routine is only used by expand_eh_region_start, and that is
   the only way in which an exception region should be started.  This
   routine is only used when using the setjmp/longjmp codegen method
   for exception handling.  */

int
expand_dcc_cleanup (decl)
     tree decl;
{
  struct nesting *thisblock = block_stack;
  tree cleanup;

  /* Error if we are not in any block.  */
  if (thisblock == 0)
    return 0;

  /* Record the cleanup for the dynamic handler chain.  */

  /* All cleanups must be on the function_obstack.  */
  push_obstacks_nochange ();
  resume_temporary_allocation ();
  cleanup = make_node (POPDCC_EXPR);
  pop_obstacks ();

  /* Add the cleanup in a manner similar to expand_decl_cleanup.  */
  thisblock->data.block.cleanups
    = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);

  /* If this block has a cleanup, it belongs in stack_block_stack.  */
  stack_block_stack = thisblock;
  return 1;
}

/* Arrange for the top element of the dynamic handler chain to be
   popped if we exit the current binding contour.  DECL is the
   associated declaration, if any, otherwise NULL_TREE.  If the current
   contour is left via an exception, then __sjthrow will pop the top
   element off the dynamic handler chain.  The code that avoids doing
   the action we push into the handler chain in the exceptional case
   is contained in expand_cleanups.

   This routine is only used by expand_eh_region_start, and that is
   the only way in which an exception region should be started.  This
   routine is only used when using the setjmp/longjmp codegen method
   for exception handling.  */

int
expand_dhc_cleanup (decl)
     tree decl;
{
  struct nesting *thisblock = block_stack;
  tree cleanup;

  /* Error if we are not in any block.  */
  if (thisblock == 0)
    return 0;

  /* Record the cleanup for the dynamic handler chain.  */

  /* All cleanups must be on the function_obstack.  */
  push_obstacks_nochange ();
  resume_temporary_allocation ();
  cleanup = make_node (POPDHC_EXPR);
  pop_obstacks ();

  /* Add the cleanup in a manner similar to expand_decl_cleanup.  */
  thisblock->data.block.cleanups
    = temp_tree_cons (decl, cleanup, thisblock->data.block.cleanups);

  /* If this block has a cleanup, it belongs in stack_block_stack.  */
  stack_block_stack = thisblock;
  return 1;
}

/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
   DECL_ELTS is the list of elements that belong to DECL's type.
   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */

void
expand_anon_union_decl (decl, cleanup, decl_elts)
     tree decl, cleanup, decl_elts;
{
  struct nesting *thisblock = block_stack;
  rtx x;

  expand_decl (decl);
  expand_decl_cleanup (decl, cleanup);
  x = DECL_RTL (decl);

  while (decl_elts)
    {
      tree decl_elt = TREE_VALUE (decl_elts);
      tree cleanup_elt = TREE_PURPOSE (decl_elts);
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));

      /* Propagate the union's alignment to the elements.  */
      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);

      /* If the element has BLKmode and the union doesn't, the union is
         aligned such that the element doesn't need to have BLKmode, so
         change the element's mode to the appropriate one for its size.  */
      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
	DECL_MODE (decl_elt) = mode
	  = mode_for_size (TREE_INT_CST_LOW (DECL_SIZE (decl_elt)),
			   MODE_INT, 1);

      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
         instead create a new MEM rtx with the proper mode.  */
      if (GET_CODE (x) == MEM)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    {
	      DECL_RTL (decl_elt) = gen_rtx_MEM (mode, copy_rtx (XEXP (x, 0)));
	      MEM_COPY_ATTRIBUTES (DECL_RTL (decl_elt), x);
	      RTX_UNCHANGING_P (DECL_RTL (decl_elt)) = RTX_UNCHANGING_P (x);
	    }
	}
      else if (GET_CODE (x) == REG)
	{
	  if (mode == GET_MODE (x))
	    DECL_RTL (decl_elt) = x;
	  else
	    DECL_RTL (decl_elt) = gen_rtx_SUBREG (mode, x, 0);
	}
      else
	abort ();

      /* Record the cleanup if there is one.  */

      if (cleanup != 0)
	thisblock->data.block.cleanups
	  = temp_tree_cons (decl_elt, cleanup_elt,
			    thisblock->data.block.cleanups);

      decl_elts = TREE_CHAIN (decl_elts);
    }
}
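
/* Illustrative example (added commentary, not from the original
   sources): an anonymous union declaration such as

       union { int i; float f; };

   makes `i' and `f' visible directly.  Each element decl shares the
   union's storage, which is why its RTL above is a retyped copy of
   the union's MEM or REG rather than fresh storage.  */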

/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If DONT_DO is nonnull, then any list-element
   whose TREE_PURPOSE matches DONT_DO is omitted.
   This is sometimes used to avoid a cleanup associated with
   a value that is being returned out of the scope.

   If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
   goto and handle protection regions specially in that case.

   If REACHABLE, we emit code, otherwise just inform the exception handling
   code about this finalization.  */

static void
expand_cleanups (list, dont_do, in_fixup, reachable)
     tree list;
     tree dont_do;
     int in_fixup;
     int reachable;
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
      {
	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
	  expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
	else
	  {
	    if (! in_fixup)
	      {
		tree cleanup = TREE_VALUE (tail);

		/* See expand_d{h,c}c_cleanup for why we avoid this.  */
		if (TREE_CODE (cleanup) != POPDHC_EXPR
		    && TREE_CODE (cleanup) != POPDCC_EXPR
		    /* See expand_eh_region_start_tree for this case.  */
		    && ! TREE_ADDRESSABLE (tail))
		  {
		    cleanup = protect_with_terminate (cleanup);
		    expand_eh_region_end (cleanup);
		  }
	      }

	    if (reachable)
	      {
		/* Cleanups may be run multiple times.  For example,
		   when exiting a binding contour, we expand the
		   cleanups associated with that contour.  When a goto
		   within that binding contour has a target outside that
		   contour, it will expand all cleanups from its scope to
		   the target.  Though the cleanups are expanded multiple
		   times, the control paths are non-overlapping so the
		   cleanups will not be executed twice.  */

		/* We may need to protect fixups with rethrow regions.  */
		int protect = (in_fixup && ! TREE_ADDRESSABLE (tail));

		if (protect)
		  expand_fixup_region_start ();

		expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
		if (protect)
		  expand_fixup_region_end (TREE_VALUE (tail));
		free_temp_slots ();
	      }
	  }
      }
}

/* Mark the context we are emitting RTL for as a conditional context,
   so that any cleanup actions we register with expand_decl_init will
   be properly conditionalized when those cleanup actions are later
   performed.  Must be called before any expression (tree) is expanded
   that is within a conditional context.  */

void
start_cleanup_deferral ()
{
  /* block_stack can be NULL if we are inside the parameter list.  It is
     OK to do nothing, because cleanups aren't possible here.  */
  if (block_stack)
    ++block_stack->data.block.conditional_code;
}

/* Mark the end of a conditional region of code.  Because cleanup
   deferrals may be nested, we may still be in a conditional region
   after we end the currently deferred cleanups; only after we end all
   deferred cleanups are we back in unconditional code.  */

void
end_cleanup_deferral ()
{
  /* block_stack can be NULL if we are inside the parameter list.  It is
     OK to do nothing, because cleanups aren't possible here.  */
  if (block_stack)
    --block_stack->data.block.conditional_code;
}

/* Move all cleanups from the current block_stack
   to the containing block_stack, where they are assumed to
   have been created.  If anything can cause a temporary to
   be created, but not expanded for more than one level of
   block_stacks, then this code will have to change.  */

void
move_cleanups_up ()
{
  struct nesting *block = block_stack;
  struct nesting *outer = block->next;

  outer->data.block.cleanups
    = chainon (block->data.block.cleanups,
	       outer->data.block.cleanups);
  block->data.block.cleanups = 0;
}

tree
last_cleanup_this_contour ()
{
  if (block_stack == 0)
    return 0;

  return block_stack->data.block.cleanups;
}

/* Return 1 if there are any pending cleanups at this point.
   If THIS_CONTOUR is nonzero, check the current contour as well.
   Otherwise, look only at the contours that enclose this one.  */

int
any_pending_cleanups (this_contour)
     int this_contour;
{
  struct nesting *block;

  if (block_stack == 0)
    return 0;

  if (this_contour && block_stack->data.block.cleanups != NULL)
    return 1;
  if (block_stack->data.block.cleanups == 0
      && block_stack->data.block.outer_cleanups == 0)
    return 0;

  for (block = block_stack->next; block; block = block->next)
    if (block->data.block.cleanups != 0)
      return 1;

  return 0;
}

/* Enter a case (Pascal) or switch (C) statement.
   Push a block onto case_stack and nesting_stack
   to accumulate the case-labels that are seen
   and to record the labels generated for the statement.

   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
   Otherwise, this construct is transparent for `exit_something'.

   EXPR is the index-expression to be dispatched on.
   TYPE is its nominal type.  We could simply convert EXPR to this type,
   but instead we take short cuts.  */

void
expand_start_case (exit_flag, expr, type, printname)
     int exit_flag;
     tree expr;
     tree type;
     const char *printname;
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the case we are entering.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.index_expr = expr;
  thiscase->data.case_stmt.nominal_type = type;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  thiscase->data.case_stmt.printname = printname;
  thiscase->data.case_stmt.line_number_status = force_line_numbers ();
  case_stack = thiscase;
  nesting_stack = thiscase;

  do_pending_stack_adjust ();

  /* Make sure case_stmt.start points to something that won't
     need any transformation before expand_end_case.  */
  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL_PTR, NOTE_INSN_DELETED);

  thiscase->data.case_stmt.start = get_last_insn ();

  start_cleanup_deferral ();
}
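
/* Illustrative example (added commentary, not from the original
   sources; the tree arguments and labels are hypothetical): for

       switch (x) { case 1: f (); break; default: g (); }

   a front end drives this module roughly as

       expand_start_case (1, x_tree, type, "switch statement");
       pushcase (one_tree, convert, label_1, &dup);     <- case 1:
       ...expand the arm...
       pushcase (NULL_TREE, convert, label_d, &dup);    <- default:
       ...expand the arm...
       expand_end_case (x_tree);  */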


/* Start a "dummy case statement" within which case labels are invalid
   and are not connected to any larger real case statement.
   This can be used if you don't want to let a case statement jump
   into the middle of certain kinds of constructs.  */

void
expand_start_case_dummy ()
{
  register struct nesting *thiscase = ALLOC_NESTING ();

  /* Make an entry on case_stack for the dummy.  */

  thiscase->next = case_stack;
  thiscase->all = nesting_stack;
  thiscase->depth = ++nesting_depth;
  thiscase->exit_label = 0;
  thiscase->data.case_stmt.case_list = 0;
  thiscase->data.case_stmt.start = 0;
  thiscase->data.case_stmt.nominal_type = 0;
  thiscase->data.case_stmt.default_label = 0;
  thiscase->data.case_stmt.num_ranges = 0;
  case_stack = thiscase;
  nesting_stack = thiscase;
  start_cleanup_deferral ();
}

/* End a dummy case statement.  */

void
expand_end_case_dummy ()
{
  end_cleanup_deferral ();
  POPSTACK (case_stack);
}

/* Return the data type of the index-expression
   of the innermost case statement, or null if none.  */

tree
case_index_expr_type ()
{
  if (case_stack)
    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
  return 0;
}

static void
check_seenlabel ()
{
  /* If this is the first label, warn if any insns have been emitted.  */
  if (case_stack->data.case_stmt.line_number_status >= 0)
    {
      rtx insn;

      restore_line_number_status
	(case_stack->data.case_stmt.line_number_status);
      case_stack->data.case_stmt.line_number_status = -1;

      for (insn = case_stack->data.case_stmt.start;
	   insn;
	   insn = NEXT_INSN (insn))
	{
	  if (GET_CODE (insn) == CODE_LABEL)
	    break;
	  if (GET_CODE (insn) != NOTE
	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
	    {
	      do
		insn = PREV_INSN (insn);
	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));

	      /* If insn is zero, then there must have been a syntax error.  */
	      if (insn)
		warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
					    NOTE_LINE_NUMBER (insn),
					    "unreachable code at beginning of %s",
					    case_stack->data.case_stmt.printname);
	      break;
	    }
	}
    }
}

/* Accumulate one case or default label inside a case or switch statement.
   VALUE is the value of the case (a null pointer, for a default label).
   The function CONVERTER, when applied to arguments T and V,
   converts the value V to the type T.

   If not currently inside a case or switch statement, return 1 and do
   nothing.  The caller will print a language-specific error message.
   If VALUE is a duplicate or overlaps, return 2 and do nothing
   except store the (first) duplicate node in *DUPLICATE.
   If VALUE is out of range, return 3 and do nothing.
   If we are jumping into the scope of a cleanup or var-sized array, return 5.
   Return 0 on success.

   Extended to handle range statements.  */

int
pushcase (value, converter, label, duplicate)
     register tree value;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  /* Convert VALUE to the type in which the comparisons are nominally done.  */
  if (value != 0)
    value = (*converter) (nominal_type, value);

  check_seenlabel ();

  /* Fail if this value is out of range for the actual type of the index
     (which may be narrower than NOMINAL_TYPE).  */
  if (value != 0 && ! int_fits_type_p (value, index_type))
    return 3;

  /* Fail if this is a duplicate or overlaps another entry.  */
  if (value == 0)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
    }
  else
    return add_case_node (value, value, label, duplicate);

  expand_label (label);
  return 0;
}

/* Like pushcase but this case applies to all values between VALUE1 and
   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
   starts at VALUE1 and ends at the highest value of the index type.
   If both are NULL, this case applies to all values.

   The return value is the same as that of pushcase but there is one
   additional error code: 4 means the specified range was empty.  */

int
pushcase_range (value1, value2, converter, label, duplicate)
     register tree value1, value2;
     tree (*converter) PROTO((tree, tree));
     register tree label;
     tree *duplicate;
{
  tree index_type;
  tree nominal_type;

  /* Fail if not inside a real case statement.  */
  if (! (case_stack && case_stack->data.case_stmt.start))
    return 1;

  if (stack_block_stack
      && stack_block_stack->depth > case_stack->depth)
    return 5;

  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
  nominal_type = case_stack->data.case_stmt.nominal_type;

  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
  if (index_type == error_mark_node)
    return 0;

  check_seenlabel ();

  /* Convert VALUEs to type in which the comparisons are nominally done
     and replace any unspecified value with the corresponding bound.  */
  if (value1 == 0)
    value1 = TYPE_MIN_VALUE (index_type);
  if (value2 == 0)
    value2 = TYPE_MAX_VALUE (index_type);

  /* Fail if the range is empty.  Do this before any conversion since
     we want to allow out-of-range empty ranges.  */
  if (value2 && tree_int_cst_lt (value2, value1))
    return 4;

  value1 = (*converter) (nominal_type, value1);

  /* If the max was unbounded, use the max of the nominal_type we are
     converting to.  Do this after the < check above to suppress false
     positives.  */
  if (!value2)
    value2 = TYPE_MAX_VALUE (nominal_type);
  value2 = (*converter) (nominal_type, value2);

  /* Fail if these values are out of range.  */
  if (TREE_CONSTANT_OVERFLOW (value1)
      || ! int_fits_type_p (value1, index_type))
    return 3;

  if (TREE_CONSTANT_OVERFLOW (value2)
      || ! int_fits_type_p (value2, index_type))
    return 3;

  return add_case_node (value1, value2, label, duplicate);
}
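
/* Illustrative example (added commentary, not from the original
   sources): pushcase_range implements the GNU C case-range
   extension,

       switch (c)
         {
         case 'a' ... 'z':  lower ();  break;
         case '0' ... '9':  digit ();  break;
         }

   Each `case LOW ... HIGH:' becomes a single case node whose low and
   high bounds differ, and an empty range such as `case 5 ... 1:'
   yields return code 4.  */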

/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.  */

static int
add_case_node (low, high, label, duplicate)
     tree low, high;
     tree label;
     tree *duplicate;
{
  struct case_node *p, **q, *r;

  q = &case_stack->data.case_stmt.case_list;
  p = *q;

  while ((r = *q))
    {
      p = r;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
	q = &p->left;

      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
	q = &p->right;

      else
	{
	  /* We have an overlap; this is an error.  */
	  *duplicate = p->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.
     Copy LOW, HIGH so they are on temporary rather than momentary
     obstack and will thus survive till the end of the case statement.  */

  r = (struct case_node *) oballoc (sizeof (struct case_node));
  r->low = copy_node (low);

  /* If the bounds are equal, turn this into the one-value case.  */

  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    {
      r->high = copy_node (high);
      case_stack->data.case_stmt.num_ranges++;
    }

  r->code_label = label;
  expand_label (label);

  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;

  while (p)
    {
      struct case_node *s;

      if (r == p->left)
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from left side.  */
	    p->balance = -1;
	  else if (b < 0)
	    {
	      if (r->balance < 0)
		{
		  /* R-Rotation */
		  if ((p->left = s = r->right))
		    s->parent = p;

		  r->right = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;

		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    case_stack->data.case_stmt.case_list = r;
		}
	      else
		/* r->balance == +1 */
		{
		  /* LR-Rotation */

		  int b2;
		  struct case_node *t = r->right;

		  if ((p->left = s = t->right))
		    s->parent = p;

		  t->right = p;
		  if ((r->right = s = t->left))
		    s->parent = r;

		  t->left = r;
		  b = t->balance;
		  b2 = b < 0;
		  p->balance = b2;
		  b2 = -b2 - b;
		  r->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }

	  else
	    {
	      /* p->balance == +1; growth of left side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}
      else
	/* r == p->right */
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from right side.  */
	    p->balance++;
	  else if (b > 0)
	    {
	      if (r->balance > 0)
		{
		  /* L-Rotation */

		  if ((p->right = s = r->left))
		    s->parent = p;

		  r->left = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;
		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }

		  else
		    case_stack->data.case_stmt.case_list = r;
		}

	      else
		/* r->balance == -1 */
		{
		  /* RL-Rotation */
		  int b2;
		  struct case_node *t = r->left;

		  if ((p->right = s = t->left))
		    s->parent = p;

		  t->left = p;

		  if ((r->left = s = t->right))
		    s->parent = r;

		  t->right = r;
		  b = t->balance;
		  b2 = b < 0;
		  r->balance = b2;
		  b2 = -b2 - b;
		  p->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }

		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == -1; growth of right side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}

      r = p;
      p = p->parent;
    }

  return 0;
}
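
/* Illustrative note (added commentary, not from the original
   sources): the R-Rotation above is the classic AVL single rotation
   for a left-heavy subtree,

            p                r
           / \              / \
          r   C     ->     A   p
         / \                  / \
        A   B                B   C

   and the L-, LR- and RL-Rotations are its mirror image and the two
   standard double rotations.  */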


/* Returns the number of possible values of TYPE.
   Returns -1 if the number is unknown or variable.
   Returns -2 if the number does not fit in a HOST_WIDE_INT.
   Sets *SPARENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
   do not increase monotonically (there may be duplicates);
   to 1 if the values increase monotonically, but not always by 1;
   otherwise sets it to 0.  */

HOST_WIDE_INT
all_cases_count (type, spareness)
     tree type;
     int *spareness;
{
  HOST_WIDE_INT count;
  *spareness = 0;

  switch (TREE_CODE (type))
    {
      tree t;
    case BOOLEAN_TYPE:
      count = 2;
      break;
    case CHAR_TYPE:
      count = 1 << BITS_PER_UNIT;
      break;
    default:
    case INTEGER_TYPE:
      if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
	  || TYPE_MAX_VALUE (type) == NULL
	  || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
	return -1;
      else
	{
	  /* count
	     = TREE_INT_CST_LOW (TYPE_MAX_VALUE (type))
	     - TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + 1
	     but with overflow checking.  */
	  tree mint = TYPE_MIN_VALUE (type);
	  tree maxt = TYPE_MAX_VALUE (type);
	  HOST_WIDE_INT lo, hi;
	  neg_double (TREE_INT_CST_LOW (mint), TREE_INT_CST_HIGH (mint),
		      &lo, &hi);
	  add_double (TREE_INT_CST_LOW (maxt), TREE_INT_CST_HIGH (maxt),
		      lo, hi, &lo, &hi);
	  add_double (lo, hi, 1, 0, &lo, &hi);
	  if (hi != 0 || lo < 0)
	    return -2;
	  count = lo;
	}
      break;
    case ENUMERAL_TYPE:
      count = 0;
      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
	{
	  if (TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
	      || TREE_CODE (TREE_VALUE (t)) != INTEGER_CST
	      || TREE_INT_CST_LOW (TYPE_MIN_VALUE (type)) + count
	      != TREE_INT_CST_LOW (TREE_VALUE (t)))
	    *spareness = 1;
	  count++;
	}
      if (*spareness == 1)
	{
	  tree prev = TREE_VALUE (TYPE_VALUES (type));
	  for (t = TYPE_VALUES (type); t = TREE_CHAIN (t), t != NULL_TREE; )
	    {
	      if (! tree_int_cst_lt (prev, TREE_VALUE (t)))
		{
		  *spareness = 2;
		  break;
		}
	      prev = TREE_VALUE (t);
	    }

	}
    }
  return count;
}
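
/* Illustrative example (added commentary, not from the original
   sources): for the enumerated types

       enum dense { A, B, C };          <- count 3, *spareness 0
       enum gappy { X = 1, Y = 4 };     <- count 2, *spareness 1
       enum dup   { P = 2, Q = 1 };     <- count 2, *spareness 2

   the sparseness result tells mark_seen_cases below whether a linear
   walk suffices or the quadratic duplicate-handling loop is
   required.  */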


#define BITARRAY_TEST(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
			  & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
#define BITARRAY_SET(ARRAY, INDEX) \
  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
			  |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))

/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
   with the case values we have seen, assuming the case expression
   has the given TYPE.
   SPARSENESS is as determined by all_cases_count.

   The time needed is proportional to COUNT, unless
   SPARSENESS is 2, in which case quadratic time is needed.  */

void
mark_seen_cases (type, cases_seen, count, sparseness)
     tree type;
     unsigned char *cases_seen;
     long count;
     int sparseness;
{
  tree next_node_to_try = NULL_TREE;
  long next_node_offset = 0;

  register struct case_node *n, *root = case_stack->data.case_stmt.case_list;
  tree val = make_node (INTEGER_CST);
  TREE_TYPE (val) = type;
  if (! root)
    ; /* Do nothing */
  else if (sparseness == 2)
    {
      tree t;
      HOST_WIDE_INT xlo;

      /* This less efficient loop is only needed to handle
	 duplicate case values (multiple enum constants
	 with the same value).  */
      TREE_TYPE (val) = TREE_TYPE (root->low);
      for (t = TYPE_VALUES (type), xlo = 0;  t != NULL_TREE;
	   t = TREE_CHAIN (t), xlo++)
	{
	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
	  n = root;
	  do
	    {
	      /* Keep going past elements distinctly greater than VAL.  */
	      if (tree_int_cst_lt (val, n->low))
		n = n->left;

	      /* or distinctly less than VAL.  */
	      else if (tree_int_cst_lt (n->high, val))
		n = n->right;

	      else
		{
		  /* We have found a matching range.  */
		  BITARRAY_SET (cases_seen, xlo);
		  break;
		}
	    }
	  while (n);
	}
    }
  else
    {
      if (root->left)
	case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
      for (n = root; n; n = n->right)
	{
	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
	  while ( ! tree_int_cst_lt (n->high, val))
	    {
	      /* Calculate (into xlo) the "offset" of the integer (val).
		 The element with lowest value has offset 0, the next smallest
		 element has offset 1, etc.  */

	      HOST_WIDE_INT xlo, xhi;
	      tree t;
	      if (sparseness && TYPE_VALUES (type) != NULL_TREE)
		{
		  /* The TYPE_VALUES will be in increasing order, so
		     start searching where we last ended.  */
		  t = next_node_to_try;
		  xlo = next_node_offset;
		  xhi = 0;
		  for (;;)
		    {
		      if (t == NULL_TREE)
			{
			  t = TYPE_VALUES (type);
			  xlo = 0;
			}
		      if (tree_int_cst_equal (val, TREE_VALUE (t)))
			{
			  next_node_to_try = TREE_CHAIN (t);
			  next_node_offset = xlo + 1;
			  break;
			}
		      xlo++;
		      t = TREE_CHAIN (t);
		      if (t == next_node_to_try)
			{
			  xlo = -1;
			  break;
			}
		    }
		}
	      else
		{
		  t = TYPE_MIN_VALUE (type);
		  if (t)
		    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
				&xlo, &xhi);
		  else
		    xlo = xhi = 0;
		  add_double (xlo, xhi,
			      TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
			      &xlo, &xhi);
		}

	      if (xhi == 0 && xlo >= 0 && xlo < count)
		BITARRAY_SET (cases_seen, xlo);
	      add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
			  1, 0,
			  &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
	    }
	}
    }
}

/* Called when the index of a switch statement is an enumerated type
   and there is no default label.

   Checks that all enumeration literals are covered by the case
   expressions of a switch.  Also, warn if there are any extra
   switch cases that are *not* elements of the enumerated type.

   If all enumeration literals were covered by the case expressions,
   turn one of the expressions into the default expression since it should
   not be possible to fall through such a switch.  */

void
check_for_full_enumeration_handling (type)
     tree type;
{
  register struct case_node *n;
  register tree chain;
#if 0  /* variables used by 'if 0'ed code below.  */
  register struct case_node **l;
  int all_values = 1;
#endif

  /* True iff the selector type is a numbered set mode.  */
  int sparseness = 0;

  /* The number of possible selector values.  */
  HOST_WIDE_INT size;

  /* For each possible selector value, a one iff it has been matched
     by a case value alternative.  */
  unsigned char *cases_seen;

  /* The allocated size of cases_seen, in chars.  */
  long bytes_needed;

  if (! warn_switch)
    return;

  size = all_cases_count (type, &sparseness);
  bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;

  if (size > 0 && size < 600000
      /* We deliberately use malloc here - not xmalloc.  */
      && (cases_seen = (unsigned char *) malloc (bytes_needed)) != NULL)
    {
      long i;
      tree v = TYPE_VALUES (type);
      bzero (cases_seen, bytes_needed);

      /* The time complexity of this code is normally O(N), where
	 N is the number of members in the enumerated type.
	 However, if type is an ENUMERAL_TYPE whose values do not
	 increase monotonically, O(N*log(N)) time may be needed.  */

      mark_seen_cases (type, cases_seen, size, sparseness);

      for (i = 0;  v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
	{
	  if (BITARRAY_TEST (cases_seen, i) == 0)
	    warning ("enumeration value `%s' not handled in switch",
		     IDENTIFIER_POINTER (TREE_PURPOSE (v)));
	}

      free (cases_seen);
    }

  /* Now we go the other way around; we warn if there are case
     expressions that don't correspond to enumerators.  This can
     occur since C and C++ don't enforce type-checking of
     assignments to enumeration variables.  */

  if (case_stack->data.case_stmt.case_list
      && case_stack->data.case_stmt.case_list->left)
    case_stack->data.case_stmt.case_list
      = case_tree2list (case_stack->data.case_stmt.case_list, 0);
  if (warn_switch)
    for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
      {
	for (chain = TYPE_VALUES (type);
	     chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
	     chain = TREE_CHAIN (chain))
	  ;

	if (!chain)
	  {
	    if (TYPE_NAME (type) == 0)
	      warning ("case value `%ld' not in enumerated type",
		       (long) TREE_INT_CST_LOW (n->low));
	    else
	      warning ("case value `%ld' not in enumerated type `%s'",
		       (long) TREE_INT_CST_LOW (n->low),
		       IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
					    == IDENTIFIER_NODE)
					   ? TYPE_NAME (type)
					   : DECL_NAME (TYPE_NAME (type))));
	  }
	if (!tree_int_cst_equal (n->low, n->high))
	  {
	    for (chain = TYPE_VALUES (type);
		 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
		 chain = TREE_CHAIN (chain))
	      ;

	    if (!chain)
	      {
		if (TYPE_NAME (type) == 0)
		  warning ("case value `%ld' not in enumerated type",
			   (long) TREE_INT_CST_LOW (n->high));
		else
		  warning ("case value `%ld' not in enumerated type `%s'",
			   (long) TREE_INT_CST_LOW (n->high),
			   IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
						== IDENTIFIER_NODE)
					       ? TYPE_NAME (type)
					       : DECL_NAME (TYPE_NAME (type))));
	      }
	  }
      }

#if 0
  /* ??? This optimization is disabled because it causes valid programs to
     fail.  ANSI C does not guarantee that an expression with enum type
     will have a value that is the same as one of the enumeration literals.  */

  /* If all values were found as case labels, make one of them the default
     label.  Thus, this switch will never fall through.  We arbitrarily pick
     the last one to make the default since this is likely the most
     efficient choice.  */

  if (all_values)
    {
      for (l = &case_stack->data.case_stmt.case_list;
	   (*l)->right != 0;
	   l = &(*l)->right)
	;

      case_stack->data.case_stmt.default_label = (*l)->code_label;
      *l = 0;
    }
#endif /* 0 */
}
4995
4996
4997/* Terminate a case (Pascal) or switch (C) statement
4998   in which ORIG_INDEX is the expression to be tested.
4999   Generate the code to test it and jump to the right place.  */
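
/* A rough picture of the strategy chosen below (the numbers follow this
   function's heuristics; the example case sets are invented):

     - cases 0 through 9 (count 10, range 9): dense, so a dispatch table
       is emitted via casesi or tablejump;

     - cases 1, 5 and 1000 (count 3, range 999): range > 10 * count, so a
       balanced tree of compare-and-branch insns is emitted instead;

     - a constant index: a single unconditional jump to the matching label
       (or to the default label) is emitted.  */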

void
expand_end_case (orig_index)
     tree orig_index;
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range, orig_minval;
  rtx default_label = 0;
  register struct case_node *n;
  unsigned int count;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  register int i;
  rtx before_case;
  register struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  int unsignedp;

  table_label = gen_label_rtx ();
  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TREE_UNSIGNED (index_type);

  do_pending_stack_adjust ();

  /* This might get a spurious warning in the presence of a syntax error;
     it could be fixed by moving the call to check_seenlabel after the
     check for error_mark_node, and copying the code of check_seenlabel that
     deals with case_stack->data.case_stmt.line_number_status /
     restore_line_number_status in front of the call to end_cleanup_deferral.
     However, this might miss some useful warnings in the presence of
     non-syntax errors.  */
  check_seenlabel ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If the switch expression was an enumerated type, check that all
	 enumeration literals are covered by the cases.
	 There is no sense trying this if there's a default case, however.  */

      if (!thiscase->data.case_stmt.default_label
	  && TREE_CODE (TREE_TYPE (orig_index)) == ENUMERAL_TYPE
	  && TREE_CODE (index_expr) != INTEGER_CST)
	check_for_full_enumeration_handling (TREE_TYPE (orig_index));

      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      if (thiscase->data.case_stmt.case_list
	  && thiscase->data.case_stmt.case_list->left)
	thiscase->data.case_stmt.case_list
	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check that the low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (index_type, n->low);
	  n->high = convert (index_type, n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;
	}

      orig_minval = minval;

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      end_cleanup_deferral ();

      if (count == 0)
	{
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* If the range of values is much bigger than the number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

#ifndef CASE_VALUES_THRESHOLD
#ifdef HAVE_casesi
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#else
      /* If the machine does not have a case insn that compares the
	 bounds, this means extra overhead for dispatch tables,
	 which raises the threshold for using them.  */
#define CASE_VALUES_THRESHOLD 5
#endif /* HAVE_casesi */
#endif /* CASE_VALUES_THRESHOLD */

      else if (TREE_INT_CST_HIGH (range) != 0
	       || count < (unsigned int) CASE_VALUES_THRESHOLD
	       || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
		   > 10 * count)
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || TREE_CODE (index_expr) == INTEGER_CST
	       /* These will reduce to a constant.  */
	       || (TREE_CODE (index_expr) == CALL_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 0)) == ADDR_EXPR
		   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == FUNCTION_DECL
		   && DECL_FUNCTION_CODE (TREE_OPERAND (TREE_OPERAND (index_expr, 0), 0)) == BUILT_IN_CLASSIFY_TYPE)
	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char for which we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && (cmp_optab->handlers[(int) GET_MODE (index)].insn_code
		  == CODE_FOR_nothing))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (cmp_optab->handlers[(int) wider_mode].insn_code
		    != CODE_FOR_nothing)
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (GET_CODE (index) == MEM)
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (index_type, index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

	      use_cost_table
		= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list,
				  NULL_PTR);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, index_type);
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  int win = 0;
#ifdef HAVE_casesi
	  if (HAVE_casesi)
	    {
	      enum machine_mode index_mode = SImode;
	      int index_bits = GET_MODE_BITSIZE (index_mode);
	      rtx op1, op2;
	      enum machine_mode op_mode;

	      /* Convert the index to SImode.  */
	      if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
		  > GET_MODE_BITSIZE (index_mode))
		{
		  enum machine_mode omode = TYPE_MODE (index_type);
		  rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

		  /* We must handle the endpoints in the original mode.  */
		  index_expr = build (MINUS_EXPR, index_type,
				      index_expr, minval);
		  minval = integer_zero_node;
		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		  emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
					   omode, 1, 0, default_label);
		  /* Now we can safely truncate.  */
		  index = convert_to_mode (index_mode, index, 0);
		}
	      else
		{
		  if (TYPE_MODE (index_type) != index_mode)
		    {
		      index_expr = convert (type_for_size (index_bits, 0),
					    index_expr);
		      index_type = TREE_TYPE (index_expr);
		    }

		  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
		}
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      op_mode = insn_operand_mode[(int) CODE_FOR_casesi][0];
	      if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][0])
		  (index, op_mode))
		index = copy_to_mode_reg (op_mode, index);

	      op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_operand_mode[(int) CODE_FOR_casesi][1];
	      if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][1])
		  (op1, op_mode))
		op1 = copy_to_mode_reg (op_mode, op1);

	      op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

	      op_mode = insn_operand_mode[(int) CODE_FOR_casesi][2];
	      if (! (*insn_operand_predicate[(int) CODE_FOR_casesi][2])
		  (op2, op_mode))
		op2 = copy_to_mode_reg (op_mode, op2);

	      emit_jump_insn (gen_casesi (index, op1, op2,
					  table_label, default_label));
	      win = 1;
	    }
#endif
#ifdef HAVE_tablejump
	  if (! win && HAVE_tablejump)
	    {
	      index_expr = convert (thiscase->data.case_stmt.nominal_type,
				    fold (build (MINUS_EXPR, index_type,
						 index_expr, minval)));
	      index_type = TREE_TYPE (index_expr);
	      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
	      emit_queue ();
	      index = protect_from_queue (index, 0);
	      do_pending_stack_adjust ();

	      do_tablejump (index, TYPE_MODE (index_type),
			    expand_expr (range, NULL_RTX, VOIDmode, 0),
			    table_label, default_label);
	      win = 1;
	    }
#endif
	  if (! win)
	    abort ();

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = TREE_INT_CST_LOW (range) + 1;
	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
	  bzero ((char *) labelvec, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      register HOST_WIDE_INT i
		= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (orig_minval);

	      while (1)
		{
		  labelvec[i]
		    = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
		  if (i + TREE_INT_CST_LOW (orig_minval)
		      == TREE_INT_CST_LOW (n->high))
		    break;
		  i++;
		}
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx, 0));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      before_case = squeeze_notes (NEXT_INSN (before_case), get_last_insn ());
      reorder_insns (before_case, get_last_insn (),
		     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}

/* Convert the tree NODE into a list linked by the right field, with the left
   field zeroed.  RIGHT is used for recursion; it is a list to be placed
   rightmost in the resulting list.  */

static struct case_node *
case_tree2list (node, right)
     struct case_node *node, *right;
{
  struct case_node *left;

  if (node->right)
    right = case_tree2list (node->right, right);

  node->right = right;
  if ((left = node->left))
    {
      node->left = 0;
      return case_tree2list (left, node);
    }

  return node;
}
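
/* An illustration of case_tree2list; node values name the nodes and the
   tree shape is invented for the example:

	      4
	     / \
	    2   6
	   /   / \	==>   1 -> 2 -> 4 -> 5 -> 6 -> 7
	  1   5   7

   case_tree2list (root, 0) returns the node for 1; the nodes come out
   linked through `right' in ascending (in-order) sequence, with every
   `left' field cleared.  */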

/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */

static void
do_jump_if_equal (op1, op2, label, unsignedp)
     rtx op1, op2, label;
     int unsignedp;
{
  if (GET_CODE (op1) == CONST_INT
      && GET_CODE (op2) == CONST_INT)
    {
      if (INTVAL (op1) == INTVAL (op2))
	emit_jump (label);
    }
  else
    {
      enum machine_mode mode = GET_MODE (op1);
      if (mode == VOIDmode)
	mode = GET_MODE (op2);
      emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
			       0, label);
    }
}
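
/* For example (a hypothetical call): with two equal CONST_INTs,
   do_jump_if_equal (GEN_INT (3), GEN_INT (3), label, 0) folds the test
   away and emits an unconditional jump to LABEL; with unequal constants
   it emits nothing at all.  Only non-constant operands produce an actual
   compare-and-branch.  */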

/* Not all case values are encountered equally.  This function
   uses a heuristic to weight case labels, in cases where that
   looks like a reasonable thing to do.

   Right now, all we try to guess is text, and we establish the
   following weights:

	chars above space:	16
	digits:			16
	default:		12
	space, punct:		8
	tab:			4
	newline:		2
	other "\" chars:	1
	remaining chars:	0

   If we find any cases in the switch that are not either -1 or in the range
   of valid ASCII characters, or are control characters other than those
   commonly used with "\", don't treat this switch as scanning text.

   Return 1 if these nodes are suitable for cost estimation, otherwise
   return 0.  */
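
/* As an invented illustration: a switch whose labels are 'a', '0' and ' '
   qualifies (all lie between -1 and 127, with costs 16, 16 and 8), so the
   estimate is used when balancing the decision tree.  A switch containing
   case 200, or a case range crossing a control character such as '\001',
   makes this function return 0, and the uniform-probability assumption is
   used instead.  */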

static int
estimate_case_costs (node)
     case_node_ptr node;
{
  tree min_ascii = build_int_2 (-1, -1);
  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
  case_node_ptr n;
  int i;

  /* If we haven't already made the cost table, make it now.  Note that the
     lower bound of the table is -1, not zero.  */

  if (cost_table == NULL)
    {
      cost_table = ((short *) xmalloc (129 * sizeof (short))) + 1;
      bzero ((char *) (cost_table - 1), 129 * sizeof (short));

      for (i = 0; i < 128; i++)
	{
	  if (ISALNUM (i))
	    cost_table[i] = 16;
	  else if (ISPUNCT (i))
	    cost_table[i] = 8;
	  else if (ISCNTRL (i))
	    cost_table[i] = -1;
	}

      cost_table[' '] = 8;
      cost_table['\t'] = 4;
      cost_table['\0'] = 4;
      cost_table['\n'] = 2;
      cost_table['\f'] = 1;
      cost_table['\v'] = 1;
      cost_table['\b'] = 1;
    }

  /* See if all the case expressions look like text.  It is text if the
     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
     as signed arithmetic since we don't want to ever access cost_table with a
     value less than -1.  Also check that none of the constants in a range
     are strange control characters.  */

  for (n = node; n; n = n->right)
    {
      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
	return 0;

      for (i = TREE_INT_CST_LOW (n->low); i <= TREE_INT_CST_LOW (n->high); i++)
	if (cost_table[i] < 0)
	  return 0;
    }

  /* All interesting values are within the range of interesting
     ASCII characters.  */
  return 1;
}

/* Scan an ordered list of case nodes
   combining those with consecutive values or ranges.

   E.g. three separate entries 1: 2: 3: become one entry 1..3:  */
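
/* In source terms (user code invented for the example):

	switch (c)
	  {
	  case 1:
	  case 2:
	  case 3:  do_something ();  break;
	  }

   All three labels reach the same real insn with consecutive values, so
   the three nodes collapse into one node covering the range 1..3, which
   the decision tree can test with two comparisons instead of three.  */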

static void
group_case_nodes (head)
     case_node_ptr head;
{
  case_node_ptr node = head;

  while (node)
    {
      rtx lb = next_real_insn (label_rtx (node->code_label));
      rtx lb2;
      case_node_ptr np = node;

      /* Try to group the successors of NODE with NODE.  */
      while (((np = np->right) != 0)
	     /* Do they jump to the same place?  */
	     && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
		 || (lb != 0 && lb2 != 0
		     && simplejump_p (lb)
		     && simplejump_p (lb2)
		     && rtx_equal_p (SET_SRC (PATTERN (lb)),
				     SET_SRC (PATTERN (lb2)))))
	     /* Are their ranges consecutive?  */
	     && tree_int_cst_equal (np->low,
				    fold (build (PLUS_EXPR,
						 TREE_TYPE (node->high),
						 node->high,
						 integer_one_node)))
	     /* An overflow is not consecutive.  */
	     && tree_int_cst_lt (node->high,
				 fold (build (PLUS_EXPR,
					      TREE_TYPE (node->high),
					      node->high,
					      integer_one_node))))
	{
	  node->high = np->high;
	}
      /* NP is the first node after NODE which can't be grouped with it.
	 Delete the nodes in between, and move on to that node.  */
      node->right = np;
      node = np;
    }
}

/* Take an ordered list of case nodes
   and transform them into a near optimal binary tree,
   on the assumption that any target code selection value is as
   likely as any other.

   The transformation is performed by splitting the ordered
   list into two equal sections plus a pivot.  The parts are
   then attached to the pivot as left and right branches.  Each
   branch is then transformed recursively.  */
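
/* A sketch of the effect on an invented list of seven single-valued
   nodes (no cost table, so every node counts equally):

	1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 7

		   4
		  / \
	==>	 2   6
		/ \ / \
	       1  3 5  7

   The pivot 4 bisects the list; each three-node half is split again at
   its middle node.  The emitted decision tree then needs at most three
   levels of tests instead of a linear chain of up to seven.  */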

static void
balance_case_nodes (head, parent)
     case_node_ptr *head;
     case_node_ptr parent;
{
  register case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      register case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += cost_table[TREE_INT_CST_LOW (np->high)];
	    }

	  if (use_cost_table)
	    cost += cost_table[TREE_INT_CST_LOW (np->low)];

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total
		 cost.  Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= cost_table[TREE_INT_CST_LOW ((*npp)->high)];
		  i -= cost_table[TREE_INT_CST_LOW ((*npp)->low)];
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* Leave this branch lopsided, but optimize left-hand
		     side and fill in `parent' fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total
		 cost, where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}

/* Search the parent sections of the case node tree
   to see if a test for the lower bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node checks for the value of the current
   node minus one, the current node is bounded at its lower
   span.  Thus the test would be redundant.  */
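
/* For instance (values invented): if NODE covers the range 50..55, has no
   left child, and some ancestor already dispatched the value 49 (that
   ancestor's `high' equals NODE's low minus one), then any path reaching
   NODE has index >= 50, so the lower-bound test on NODE can be omitted.  */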

static int
node_has_low_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree low_minus_one;
  case_node_ptr pnode;

  /* If the lower bound of this node is the lowest value in the index type,
     we need not test it.  */

  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
    return 1;

  /* If this node has a left branch, the value at the left must be less
     than that at this node, so it cannot be bounded at the bottom and
     we need not bother testing any further.  */

  if (node->left)
    return 0;

  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
			       node->low, integer_one_node));

  /* If the subtraction above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value - 1.  */

  if (! tree_int_cst_lt (low_minus_one, node->low))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (low_minus_one, pnode->high))
      return 1;

  return 0;
}

/* Search the parent sections of the case node tree
   to see if a test for the upper bound of NODE would be redundant.
   INDEX_TYPE is the type of the index expression.

   The instructions to generate the case decision tree are
   output in the same order as nodes are processed, so it is
   known that if a parent node checks for the value of the current
   node plus one, the current node is bounded at its upper
   span.  Thus the test would be redundant.  */

static int
node_has_high_bound (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  tree high_plus_one;
  case_node_ptr pnode;

  /* If there is no upper bound, obviously no test is needed.  */

  if (TYPE_MAX_VALUE (index_type) == NULL)
    return 1;

  /* If the upper bound of this node is the highest value in the type
     of the index expression, we need not test against it.  */

  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
    return 1;

  /* If this node has a right branch, the value at the right must be greater
     than that at this node, so it cannot be bounded at the top and
     we need not bother testing any further.  */

  if (node->right)
    return 0;

  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
			       node->high, integer_one_node));

  /* If the addition above overflowed, we can't verify anything.
     Otherwise, look for a parent that tests our value + 1.  */

  if (! tree_int_cst_lt (node->high, high_plus_one))
    return 0;

  for (pnode = node->parent; pnode; pnode = pnode->parent)
    if (tree_int_cst_equal (high_plus_one, pnode->low))
      return 1;

  return 0;
}

/* Search the parent sections of the
   case node tree to see if both tests for the upper and lower
   bounds of NODE would be redundant.  */

static int
node_is_bounded (node, index_type)
     case_node_ptr node;
     tree index_type;
{
  return (node_has_low_bound (node, index_type)
	  && node_has_high_bound (node, index_type));
}

/* Emit an unconditional jump to LABEL unless it would be dead code.  */

static void
emit_jump_if_reachable (label)
     rtx label;
{
  if (GET_CODE (get_last_insn ()) != BARRIER)
    emit_jump (label);
}

/* Emit step-by-step code to select a case for the value of INDEX.
   The thus generated decision tree follows the form of the
   case-node binary tree NODE, whose nodes represent test conditions.
   INDEX_TYPE is the type of the index of the switch.

   Care is taken to prune redundant tests from the decision tree
   by detecting any boundary conditions already checked by
   emitted rtx.  (See node_has_high_bound, node_has_low_bound
   and node_is_bounded, above.)

   Where the test conditions can be shown to be redundant we emit
   an unconditional jump to the target code.  As a further
   optimization, the subordinates of a tree node are examined to
   check for bounded nodes.  In this case conditional and/or
   unconditional jumps as a result of the boundary check for the
   current node are arranged to target the subordinate's associated
   code for out-of-bound conditions on the current node.

   We can assume that when control reaches the code generated here,
   the index value has already been compared with the parents
   of this node, and determined to be on the same side of each parent
   as this node is.  Thus, if this node tests for the value 51,
   and a parent tested for 52, we don't need to consider
   the possibility of a value greater than 51.  If another parent
   tests for the value 50, then this node need not test anything.  */
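
/* Schematically, for a balanced tree over the invented values 1, 2 and 3
   (node 2 at the root, neither child bounded), the code emitted below
   amounts to:

	if (index == 2) goto L2;
	if (index > 2) goto T;
	if (index == 1) goto L1;
	goto default;
     T: if (index == 3) goto L3;
	goto default;

   where L1..L3 stand for the case labels, T for the internal test_label
   created in the both-children case, and the final jump to default is
   supplied by the caller's emit_jump_if_reachable.  */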

static void
emit_case_nodes (index, node, default_label, index_type)
     rtx index;
     case_node_ptr node;
     rtx default_label;
     tree index_type;
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TREE_UNSIGNED (index_type);
  typedef rtx rtx_fn ();
  enum machine_mode mode = GET_MODE (index);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  */

      do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label
		= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If the left-hand subtree does nothing,
		 go to the default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left, so we issue a
	     conditional branch to default and process the right child.

	     Omit the conditional branch to default if all it would avoid
	     is a single-valued right child; it costs too much space to
	     save so little time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index, expand_expr (node->high,
							       NULL_RTX,
							       VOIDmode, 0),
					   LT, NULL_RTX, mode, unsignedp, 0,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->right->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  */

#if 0 /* The following code and comment were formerly part
	 of the condition here, but they didn't work
	 and I don't understand what the idea was.  -- rms.  */
	  /* If our "most probable entry" is less probable
	     than the default label, emit a jump to
	     the default label using condition codes
	     already lying around.  With no right branch,
	     a branch-greater-than will get us to the default
	     label correctly.  */
	  if (use_cost_table
	       && cost_table[TREE_INT_CST_LOW (node->high)] < 12)
	    ;
#endif /* 0 */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index, expand_expr (node->high,
							       NULL_RTX,
							       VOIDmode, 0),
					   GT, NULL_RTX, mode, unsignedp, 0,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers greater than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      expand_expr (node->left->low, NULL_RTX,
					   VOIDmode, 0),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
				     GT, NULL_RTX, mode, unsignedp, 0,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
						       VOIDmode, 0),
				   GE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If the right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
							   VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
						       VOIDmode, 0),
				   LE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
						       VOIDmode, 0),
				   GE, NULL_RTX, mode, unsignedp, 0,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */

	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
							   VOIDmode, 0),
				       GT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
							   VOIDmode, 0),
				       LT, NULL_RTX, mode, unsignedp, 0,
				       default_label);
	    }

	  emit_jump (label_rtx (node->code_label));
	}
    }
}

/* These routines are used by the loop unrolling code.  They copy BLOCK trees
   so that the debugging info will be correct for the unrolled loop.  */

/* Indexed by block number, contains a pointer to the N'th block node.

   Allocated by the call to identify_blocks, then released after the call
   to reorder_blocks in the function unroll_block_trees.  */

static tree *block_vector;

void
find_loop_tree_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);

  block_vector = identify_blocks (block, get_insns ());
}

void
unroll_block_trees ()
{
  tree block = DECL_INITIAL (current_function_decl);

  reorder_blocks (block_vector, block, get_insns ());

  /* Release any memory allocated by identify_blocks.  */
  if (block_vector)
    free (block_vector);
}

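/* The intended calling sequence (an assumption based on the comments
   above, not spelled out in this file): the unroller calls
   find_loop_tree_blocks before duplicating a loop body, so that
   block_vector still describes the original BLOCK nodes, and calls
   unroll_block_trees afterwards to rebuild the block tree from the
   now-duplicated notes and release the vector.  */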