stmt.c revision 110611
/* Expands front end tree to back end RTL for GNU C-Compiler
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This file handles the generation of rtl code from tree structure
   above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
   It also creates the rtl expressions for parameters and auto variables
   and has full responsibility for allocating stack slots.

   The functions whose names start with `expand_' are called by the
   parser to generate RTL instructions for various kinds of constructs.

   Some control and binding constructs require calling several such
   functions at different times.  For example, a simple if-then
   is expanded by calling `expand_start_cond' (with the condition-expression
   as argument) before parsing the then-clause and calling `expand_end_cond'
   after parsing the then-clause.  */
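/* As an illustrative sketch (not taken from any particular front end),
   the statement `if (cond) stmt;' would be expanded by a call
   sequence along these lines:

	expand_start_cond (cond, 0);
	... generate RTL for `stmt' ...
	expand_end_cond ();

   with a call to `expand_start_else' in between when an else-clause
   is present.  */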

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "loop.h"
#include "recog.h"
#include "machmode.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free
struct obstack stmt_obstack;

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Functions and data structures for expanding case statements.  */

/* Case label structure, used to hold info on labels within case
   statements.  We handle "range" labels; for a single-value label
   as in C, the high and low limits are the same.

   An AVL tree of case nodes is initially created, and later transformed
   to a list linked via the RIGHT fields in the nodes.  Nodes with
   higher case values are later in the list.

   Switch statements can be output in one of two forms.  A branch table
   is used if there are more than a few labels and the labels are dense
   within the range between the smallest and largest case value.  If a
   branch table is used, no further manipulations are done with the case
   node chain.

   The alternative to the use of a branch table is to generate a series
   of compare and jump insns.  When that is done, we use the LEFT, RIGHT,
   and PARENT fields to hold a binary tree.  Initially the tree is
   totally unbalanced, with everything on the right.  We balance the tree
   with nodes on the left having lower case values than the parent
   and nodes on the right having higher values.  We then output the tree
   in order.  */
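/* For example (a sketch using the GNU C case-range extension
   `4 ... 6'), the switch

     switch (i)
       {
       case 1:       ...
       case 4 ... 6: ...
       default:      ...
       }

   yields two case nodes, one with LOW == HIGH == 1 and one with
   LOW == 4, HIGH == 6; the default label is recorded separately
   (in case_stmt.default_label below), not as a case node.  */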

struct case_node
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance;
};

typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
static int use_cost_table;
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
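/* For example, COST_TABLE (-1) expands to cost_table_[0], so the
   supported index range -1 .. 127 maps onto the valid array indices
   0 .. 128.  */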

/* Stack of control and binding constructs we are currently inside.

   These constructs begin when you call `expand_start_WHATEVER'
   and end when you call `expand_end_WHATEVER'.  This stack records
   info about how the construct began that tells the end-function
   what to do.  It also may provide information about the construct
   to alter the behavior of other constructs within the body.
   For example, they may affect the behavior of C `break' and `continue'.

   Each construct gets one `struct nesting' object.
   All of these objects are chained through the `all' field.
   `nesting_stack' points to the first object (innermost construct).
   The position of an entry on `nesting_stack' is in its `depth' field.

   Each type of construct has its own individual stack.
   For example, loops have `loop_stack'.  Each object points to the
   next object of the same type through the `next' field.

   Some constructs are visible to `break' exit-statements and others
   are not.  Which constructs are visible depends on the language.
   Therefore, the data structure allows each construct to be visible
   or not, according to the args given when the construct is started.
   The construct is visible if the `exit_label' field is non-null.
   In that case, the value should be a CODE_LABEL rtx.  */

struct nesting
{
  struct nesting *all;
  struct nesting *next;
  int depth;
  rtx exit_label;
  union
    {
      /* For conds (if-then and if-then-else statements).  */
      struct
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} cond;
      /* For loops.  */
      struct
	{
	  /* Label at the top of the loop; place to loop back to.  */
	  rtx start_label;
	  /* Label at the end of the whole construct.  */
	  rtx end_label;
	  /* Label before a jump that branches to the end of the whole
	     construct.  This is where destructors go if any.  */
	  rtx alt_end_label;
	  /* Label for `continue' statement to jump to;
	     this is in front of the stepper of the loop.  */
	  rtx continue_label;
	} loop;
      /* For variable binding contours.  */
      struct
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Number of function calls seen, as of start of this block.  */
	  int n_function_calls;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block; if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	  /* When in a conditional context, this is the specific
	     cleanup list associated with last_unconditional_cleanup,
	     where we place the conditionalized cleanups.  */
	  tree *cleanup_ptr;
	} block;
      /* For switch (C) or case (Pascal) statements,
	 and also for dummies (see `expand_start_case_dummy').  */
      struct
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} case_stmt;
    } data;
};

/* Allocate and return a new `struct nesting'.  */

#define ALLOC_NESTING() \
 (struct nesting *) obstack_alloc (&stmt_obstack, sizeof (struct nesting))

/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (loop_stack == this)			\
	    loop_stack = loop_stack->next;		\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all;			\
	  obstack_free (&stmt_obstack, this); }		\
     while (this != target); } while (0)
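/* So, for example, `POPSTACK (loop_stack);' frees every nesting level
   up to and including the innermost pending loop, unchaining each one
   from whichever per-construct stacks it appears on.  */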

/* In some cases it is impossible to generate code for a forward goto
   until the label definition is seen.  This happens when it may be necessary
   for the goto to reset the stack pointer: we don't yet know how to do that.
   So expand_goto puts an entry on this fixup list.
   Each time a binding contour that resets the stack is exited,
   we check each fixup.
   If the target label has now been defined, we can insert the proper code.  */
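/* For illustration (a sketch in GNU C, where the variable-length
   array `buf' makes the inner block reset the stack pointer on exit):

     {
       int n = f ();
       { char buf[n]; ... goto done; ... }
       done: ;
     }

   At the `goto', `done' is not yet defined, so a fixup is queued; the
   stack restore for `buf' is filled in once the label is seen.  */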

struct goto_fixup
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};

/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain
{
  /* Points to following fixup.  */
  struct label_chain *next;
  tree label;
};

struct stmt_status
{
  /* Chain of all pending binding contours.  */
  struct nesting *x_block_stack;

  /* If any new stacks are added here, add them to POPSTACK too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting *x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting *x_cond_stack;

  /* Chain of all pending loops.  */
  struct nesting *x_loop_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting *x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting *x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Each time we expand an expression-statement,
     record the expr's type and its RTL value here.  */
  tree x_last_expr_type;
  rtx x_last_expr_value;

  /* Nonzero if within a ({...}) grouping, in which case we must
     always compute a value for each expr-stmt in case it is the last one.  */
  int x_expr_stmts_for_value;

  /* Filename and line number of last line-number note,
     whether we actually emitted it or not.  */
  const char *x_emit_filename;
  int x_emit_lineno;

  struct goto_fixup *x_goto_fixup_chain;
};

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define loop_stack (cfun->stmt->x_loop_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define last_expr_type (cfun->stmt->x_last_expr_type)
#define last_expr_value (cfun->stmt->x_last_expr_value)
#define expr_stmts_for_value (cfun->stmt->x_expr_stmts_for_value)
#define emit_filename (cfun->stmt->x_emit_filename)
#define emit_lineno (cfun->stmt->x_emit_lineno)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

static int n_occurrences		PARAMS ((int, const char *));
static bool parse_input_constraint	PARAMS ((const char **, int, int, int,
						 int, const char * const *,
						 bool *, bool *));
static void expand_goto_internal	PARAMS ((tree, rtx, rtx));
static int expand_fixup			PARAMS ((tree, rtx, rtx));
static rtx expand_nl_handler_label	PARAMS ((rtx, rtx));
static void expand_nl_goto_receiver	PARAMS ((void));
static void expand_nl_goto_receivers	PARAMS ((struct nesting *));
static void fixup_gotos			PARAMS ((struct nesting *, rtx, tree,
					       rtx, int));
static bool check_operand_nalternatives	PARAMS ((tree, tree));
static bool check_unique_operand_names	PARAMS ((tree, tree));
static tree resolve_operand_names	PARAMS ((tree, tree, tree,
						 const char **));
static char *resolve_operand_name_1	PARAMS ((char *, tree, tree));
static void expand_null_return_1	PARAMS ((rtx));
static void expand_value_return		PARAMS ((rtx));
static int tail_recursion_args		PARAMS ((tree, tree));
static void expand_cleanups		PARAMS ((tree, tree, int, int));
static void check_seenlabel		PARAMS ((void));
static void do_jump_if_equal		PARAMS ((rtx, rtx, rtx, int));
static int estimate_case_costs		PARAMS ((case_node_ptr));
static void group_case_nodes		PARAMS ((case_node_ptr));
static void balance_case_nodes		PARAMS ((case_node_ptr *,
					       case_node_ptr));
static int node_has_low_bound		PARAMS ((case_node_ptr, tree));
static int node_has_high_bound		PARAMS ((case_node_ptr, tree));
static int node_is_bounded		PARAMS ((case_node_ptr, tree));
static void emit_jump_if_reachable	PARAMS ((rtx));
static void emit_case_nodes		PARAMS ((rtx, case_node_ptr, rtx, tree));
static struct case_node *case_tree2list	PARAMS ((case_node *, case_node *));
static void mark_cond_nesting           PARAMS ((struct nesting *));
static void mark_loop_nesting           PARAMS ((struct nesting *));
static void mark_block_nesting          PARAMS ((struct nesting *));
static void mark_case_nesting           PARAMS ((struct nesting *));
static void mark_case_node		PARAMS ((struct case_node *));
static void mark_goto_fixup             PARAMS ((struct goto_fixup *));
static void free_case_nodes             PARAMS ((case_node_ptr));

void
using_eh_for_cleanups ()
{
  using_eh_for_cleanups_p = 1;
}

/* Mark N (known to be a cond-nesting) for GC.  */

static void
mark_cond_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.cond.endif_label);
      ggc_mark_rtx (n->data.cond.next_label);

      n = n->next;
    }
}

/* Mark N (known to be a loop-nesting) for GC.  */

static void
mark_loop_nesting (n)
     struct nesting *n;
{

  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.loop.start_label);
      ggc_mark_rtx (n->data.loop.end_label);
      ggc_mark_rtx (n->data.loop.alt_end_label);
      ggc_mark_rtx (n->data.loop.continue_label);

      n = n->next;
    }
}

/* Mark N (known to be a block-nesting) for GC.  */

static void
mark_block_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      struct label_chain *l;

      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.block.stack_level);
      ggc_mark_rtx (n->data.block.first_insn);
      ggc_mark_tree (n->data.block.cleanups);
      ggc_mark_tree (n->data.block.outer_cleanups);

      for (l = n->data.block.label_chain; l != NULL; l = l->next)
	{
	  ggc_mark (l);
	  ggc_mark_tree (l->label);
	}

      ggc_mark_rtx (n->data.block.last_unconditional_cleanup);

      /* ??? cleanup_ptr never points outside the stack, does it?  */

      n = n->next;
    }
}

/* Mark N (known to be a case-nesting) for GC.  */

static void
mark_case_nesting (n)
     struct nesting *n;
{
  while (n)
    {
      ggc_mark_rtx (n->exit_label);
      ggc_mark_rtx (n->data.case_stmt.start);

      ggc_mark_tree (n->data.case_stmt.default_label);
      ggc_mark_tree (n->data.case_stmt.index_expr);
      ggc_mark_tree (n->data.case_stmt.nominal_type);

      mark_case_node (n->data.case_stmt.case_list);
      n = n->next;
    }
}

/* Mark C for GC.  */

static void
mark_case_node (c)
     struct case_node *c;
{
  if (c != 0)
    {
      ggc_mark_tree (c->low);
      ggc_mark_tree (c->high);
      ggc_mark_tree (c->code_label);

      mark_case_node (c->right);
      mark_case_node (c->left);
    }
}

/* Mark G for GC.  */

static void
mark_goto_fixup (g)
     struct goto_fixup *g;
{
  while (g)
    {
      ggc_mark (g);
      ggc_mark_rtx (g->before_jump);
      ggc_mark_tree (g->target);
      ggc_mark_tree (g->context);
      ggc_mark_rtx (g->target_rtl);
      ggc_mark_rtx (g->stack_level);
      ggc_mark_tree (g->cleanup_list_list);

      g = g->next;
    }
}

/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_stmt_status (f)
     struct function *f;
{
  /* We're about to free the function obstack.  If we hold pointers to
     things allocated there, then we'll try to mark them when we do
     GC.  So, we clear them out here explicitly.  */
  if (f->stmt)
    free (f->stmt);
  f->stmt = NULL;
}

/* Mark P for GC.  */

void
mark_stmt_status (p)
     struct stmt_status *p;
{
  if (p == 0)
    return;

  mark_block_nesting (p->x_block_stack);
  mark_cond_nesting (p->x_cond_stack);
  mark_loop_nesting (p->x_loop_stack);
  mark_case_nesting (p->x_case_stack);

  ggc_mark_tree (p->x_last_expr_type);
  /* last_expr_value is only valid if last_expr_type is nonzero.  */
  if (p->x_last_expr_type)
    ggc_mark_rtx (p->x_last_expr_value);

  mark_goto_fixup (p->x_goto_fixup_chain);
}

void
init_stmt ()
{
  gcc_obstack_init (&stmt_obstack);
}

void
init_stmt_for_function ()
{
  cfun->stmt = (struct stmt_status *) xmalloc (sizeof (struct stmt_status));

  /* We are not currently within any block, conditional, loop or case.  */
  block_stack = 0;
  stack_block_stack = 0;
  loop_stack = 0;
  case_stack = 0;
  cond_stack = 0;
  nesting_stack = 0;
  nesting_depth = 0;

  current_block_start_count = 0;

  /* No gotos have been expanded yet.  */
  goto_fixup_chain = 0;

  /* We are not processing a ({...}) grouping.  */
  expr_stmts_for_value = 0;
  last_expr_type = 0;
  last_expr_value = NULL_RTX;
}

/* Return nonzero if anything is pushed on the loop, condition, or case
   stack.  */
int
in_control_zone_p ()
{
  return cond_stack || loop_stack || case_stack;
}

/* Record the current file and line.  Called from emit_line_note.  */
void
set_file_and_line_for_stmt (file, line)
     const char *file;
     int line;
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    {
      emit_filename = file;
      emit_lineno = line;
    }
}

/* Emit a no-op instruction.  */

void
emit_nop ()
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}

/* Return the rtx-label that corresponds to a LABEL_DECL,
   creating it if necessary.  */

rtx
label_rtx (label)
     tree label;
{
  if (TREE_CODE (label) != LABEL_DECL)
    abort ();

  if (!DECL_RTL_SET_P (label))
    SET_DECL_RTL (label, gen_label_rtx ());

  return DECL_RTL (label);
}


/* Add an unconditional jump to LABEL as the next sequential instruction.  */

void
emit_jump (label)
     rtx label;
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}

/* Emit code to jump to the address
   specified by the pointer expression EXP.  */
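/* For example, given some array `fn_table' of label addresses, the
   GNU C computed goto `goto *fn_table[i];' reaches this point with
   EXP being the expression `fn_table[i]'.  */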

void
expand_computed_goto (exp)
     tree exp;
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) != Pmode)
    x = convert_memory_address (Pmode, x);
#endif

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);

  current_function_has_computed_jump = 1;
}

/* Handle goto statements and the labels that they can go to.  */

/* Specify the location in the RTL code of a label LABEL,
   which is a LABEL_DECL tree node.

   This is used for the kind of label that the user can jump to with a
   goto statement, and for alternatives of a switch or case statement.
   RTL labels generated for loops and conditionals don't go through here;
   they are generated directly at the RTL level, by other functions below.

   Note that this has nothing to do with defining label *names*.
   Languages vary in how they do that and what that even means.  */

void
expand_label (label)
     tree label;
{
  struct label_chain *p;

  do_pending_stack_adjust ();
  emit_label (label_rtx (label));
  if (DECL_NAME (label))
    LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));

  if (stack_block_stack != 0)
    {
      p = (struct label_chain *) ggc_alloc (sizeof (struct label_chain));
      p->next = stack_block_stack->data.block.label_chain;
      stack_block_stack->data.block.label_chain = p;
      p->label = label;
    }
}

/* Declare that LABEL (a LABEL_DECL) may be used for nonlocal gotos
   from nested functions.  */
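/* For illustration, in GNU C such a label arises from code like

     void f ()
     {
       __label__ failed;
       void g () { ... goto failed; ... }
       ...
       failed: ...;
     }

   where `failed' must be declared with `__label__' in the containing
   function for the nested function's goto to reach it.  */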

void
declare_nonlocal_label (label)
     tree label;
{
  rtx slot = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

  nonlocal_labels = tree_cons (NULL_TREE, label, nonlocal_labels);
  LABEL_PRESERVE_P (label_rtx (label)) = 1;
  if (nonlocal_goto_handler_slots == 0)
    {
      emit_stack_save (SAVE_NONLOCAL,
		       &nonlocal_goto_stack_level,
		       PREV_INSN (tail_recursion_reentry));
    }
  nonlocal_goto_handler_slots
    = gen_rtx_EXPR_LIST (VOIDmode, slot, nonlocal_goto_handler_slots);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (label)
     tree label;
{
  tree context;

  /* Check for a nonlocal goto to a containing function.  */
  context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    {
      struct function *p = find_function_data (context);
      rtx label_ref = gen_rtx_LABEL_REF (Pmode, label_rtx (label));
      rtx handler_slot, static_chain, save_area, insn;
      tree link;

      /* Find the corresponding handler slot for this label.  */
      handler_slot = p->x_nonlocal_goto_handler_slots;
      for (link = p->x_nonlocal_labels; TREE_VALUE (link) != label;
	   link = TREE_CHAIN (link))
	handler_slot = XEXP (handler_slot, 1);
      handler_slot = XEXP (handler_slot, 0);

      p->has_nonlocal_label = 1;
      current_function_has_nonlocal_goto = 1;
      LABEL_REF_NONLOCAL_P (label_ref) = 1;

      /* Copy the rtl for the slots so that they won't be shared in
	 case the virtual stack vars register gets instantiated differently
	 in the parent than in the child.  */

      static_chain = copy_to_reg (lookup_static_chain (label));

      /* Get addr of containing function's current nonlocal goto handler,
	 which will do any cleanups and then jump to the label.  */
      handler_slot = copy_to_reg (replace_rtx (copy_rtx (handler_slot),
					       virtual_stack_vars_rtx,
					       static_chain));

      /* Get addr of containing function's nonlocal save area.  */
      save_area = p->x_nonlocal_goto_stack_level;
      if (save_area)
	save_area = replace_rtx (copy_rtx (save_area),
				 virtual_stack_vars_rtx, static_chain);

#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	emit_insn (gen_nonlocal_goto (static_chain, handler_slot,
				      save_area, label_ref));
      else
#endif
	{
	  /* Restore frame pointer for containing function.
	     This sets the actual hard register used for the frame pointer
	     to the location of the function's incoming static chain info.
	     The non-local goto handler will then adjust it to contain the
	     proper value and reload the argument pointer, if needed.  */
	  emit_move_insn (hard_frame_pointer_rtx, static_chain);
	  emit_stack_restore (SAVE_NONLOCAL, save_area, NULL_RTX);

	  /* USE of hard_frame_pointer_rtx added for consistency;
	     not clear if really needed.  */
	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (handler_slot);
	}

      /* Search backwards to the jump insn and mark it as a
	 non-local goto.  */
      for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
	{
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO,
						  const0_rtx, REG_NOTES (insn));
	      break;
	    }
	  else if (GET_CODE (insn) == CALL_INSN)
	      break;
	}
    }
  else
    expand_goto_internal (label, label_rtx (label), NULL_RTX);
}

/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (body, label, last_insn)
     tree body;
     rtx label;
     rtx last_insn;
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		   == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}

/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree_label, rtl_label, last_insn)
     tree tree_label;
     rtx rtl_label;
     rtx last_insn;
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  /* If we are in a loop, recognize certain labels which
     are likely targets.  This reduces the number of fixups
     we need to create.  */
  else if (loop_stack
      && (rtl_label == loop_stack->data.loop.start_label
	  || rtl_label == loop_stack->data.loop.end_label
	  || rtl_label == loop_stack->data.loop.continue_label))
    end_block = loop_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup
	= (struct goto_fixup *) ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes (including expand_end_loop)
	 might move the *_BLOCK notes away, so we use a NOTE_INSN_DELETED
	 as a placeholder.  */

      {
        rtx original_before_jump
          = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	if (!cfun->x_whole_function_mode_p)
	  insert_block (block);
	else
	  {
	    BLOCK_CHAIN (block)
	      = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	    BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	      = block;
	  }

        start_sequence ();
        start = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NULL, NOTE_INSN_DELETED);
	end = emit_note (NULL, NOTE_INSN_BLOCK_END);
	if (cfun->x_whole_function_mode_p)
	  NOTE_BLOCK (end) = block;
        fixup->context = block;
        end_sequence ();
        emit_insns_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
/* Expand any needed fixups in the outermost binding level of the
   function.  FIRST_INSN is the first insn in the function.  */

void
expand_fixups (first_insn)
     rtx first_insn;
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}

/* When exiting a binding contour, process all pending gotos requiring fixups.
   THISBLOCK is the structure that describes the block being exited.
   STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
   CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
   FIRST_INSN is the insn that began this contour.

   Gotos that jump out of this contour must restore the
   stack level and do the cleanups before actually jumping.

   DONT_JUMP_IN nonzero means report an error if there is a jump into this
   contour from before the beginning of the contour.
   This is also done if STACK_LEVEL is nonzero.  */

static void
fixup_gotos (thisblock, stack_level, cleanup_list, first_insn, dont_jump_in)
     struct nesting *thisblock;
     rtx stack_level;
     tree cleanup_list;
     rtx first_insn;
     int dont_jump_in;
{
  struct goto_fixup *f, *prev;

  /* F is the fixup we are considering; PREV is the previous one.  */
  /* We run this loop in two passes so that cleanups of exited blocks
     are run first, and blocks that are exited are marked so
     afterwards.  */

  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    {
      /* Test for a fixup that is inactive because it is already handled.  */
      if (f->before_jump == 0)
	{
	  /* Delete inactive fixup from the chain, if that is easy to do.  */
	  if (prev != 0)
	    prev->next = f->next;
	}
      /* Has this fixup's target label been defined?
	 If so, we can finalize it.  */
      else if (PREV_INSN (f->target_rtl) != 0)
	{
	  rtx cleanup_insns;

	  /* If this fixup jumped into this contour from before the beginning
	     of this contour, report an error.  This code used to use
	     the first non-label insn after f->target_rtl, but that's
	     wrong since such insns can be added, by things like
	     put_var_into_stack, and have INSN_UIDs that are out of the
	     range of the block.  */
	  /* ??? Bug: this does not detect jumping in through intermediate
	     blocks that have stack levels or cleanups.
	     It detects only a problem with the innermost block
	     around the label.  */
	  if (f->target != 0
	      && (dont_jump_in || stack_level || cleanup_list)
	      && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
	      && INSN_UID (first_insn) > INSN_UID (f->before_jump)
	      && ! DECL_ERROR_ISSUED (f->target))
	    {
	      error_with_decl (f->target,
			       "label `%s' used before containing binding contour");
	      /* Prevent multiple errors for one label.  */
	      DECL_ERROR_ISSUED (f->target) = 1;
	    }

	  /* We will expand the cleanups into a sequence of their own and
	     then later on we will attach this new sequence to the insn
	     stream just ahead of the actual jump insn.  */

	  start_sequence ();

	  /* Temporarily restore the lexical context where we will
	     logically be inserting the fixup code.  We do this for the
	     sake of getting the debugging information right.  */

	  pushlevel (0);
	  set_block (f->context);

	  /* Expand the cleanups for blocks this jump exits.  */
	  if (f->cleanup_list_list)
	    {
	      tree lists;
	      for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
		/* Marked elements correspond to blocks that have been closed.
		   Do their cleanups.  */
		if (TREE_ADDRESSABLE (lists)
		    && TREE_VALUE (lists) != 0)
		  {
		    expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
		    /* Pop any pushes done in the cleanups,
		       in case function is about to return.  */
		    do_pending_stack_adjust ();
		  }
	    }

	  /* Restore stack level for the biggest contour that this
	     jump jumps out of.  */
	  if (f->stack_level
	      && ! (f->target_rtl == return_label
		    && ((TREE_CODE (TREE_TYPE (current_function_decl))
			 == FUNCTION_TYPE)
			&& (TYPE_RETURNS_STACK_DEPRESSED
			    (TREE_TYPE (current_function_decl))))))
	    emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);

	  /* Finish up the sequence containing the insns which implement the
	     necessary cleanups, and then attach that whole sequence to the
	     insn stream just ahead of the actual jump insn.  Attaching it
	     at that point ensures that any cleanups which are in fact
	     implicit C++ object destructions (which must be executed upon
	     leaving the block) appear (to the debugger) to be taking place
	     in an area of the generated code where the object(s) being
	     destructed are still "in scope".  */

	  cleanup_insns = get_insns ();
	  poplevel (1, 0, 0);

	  end_sequence ();
	  emit_insns_after (cleanup_insns, f->before_jump);

	  f->before_jump = 0;
	}
    }

  /* For any still-undefined labels, do the cleanups for this block now.
     We must do this now since items in the cleanup list may go out
     of scope when the block ends.  */
  for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
    if (f->before_jump != 0
	&& PREV_INSN (f->target_rtl) == 0
	/* Label has still not appeared.  If we are exiting a block with
	   a stack level to restore, that started before the fixup,
	   mark this stack level as needing restoration
	   when the fixup is later finalized.  */
	&& thisblock != 0
	/* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
	   means the label is undefined.  That's erroneous, but possible.  */
	&& (thisblock->data.block.block_start_count
	    <= f->block_start_count))
      {
	tree lists = f->cleanup_list_list;
	rtx cleanup_insns;

	for (; lists; lists = TREE_CHAIN (lists))
	  /* If the following elt. corresponds to our containing block
	     then the elt. must be for this block.  */
	  if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
	    {
	      start_sequence ();
	      pushlevel (0);
	      set_block (f->context);
	      expand_cleanups (TREE_VALUE (lists), NULL_TREE, 1, 1);
	      do_pending_stack_adjust ();
	      cleanup_insns = get_insns ();
	      poplevel (1, 0, 0);
	      end_sequence ();
	      if (cleanup_insns != 0)
		f->before_jump
		  = emit_insns_after (cleanup_insns, f->before_jump);

	      f->cleanup_list_list = TREE_CHAIN (lists);
	    }

	if (stack_level)
	  f->stack_level = stack_level;
      }
}

/* Return the number of times character C occurs in string S.  */
static int
n_occurrences (c, s)
     int c;
     const char *s;
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */
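/* For example, the source statement `asm volatile ("nop");' arrives
   here with STRING holding the string constant "nop" and VOL nonzero.  */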
void
expand_asm (string, vol)
     tree string;
     int vol;
{
  rtx body;

  if (TREE_CODE (string) == ADDR_EXPR)
    string = TREE_OPERAND (string, 0);

  body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));

  MEM_VOLATILE_P (body) = vol;

  emit_insn (body);

  last_expr_type = 0;
}

/* Parse the output constraint pointed to by *CONSTRAINT_P.  It is the
   OPERAND_NUMth output operand, indexed from zero.  There are NINPUTS
   inputs and NOUTPUTS outputs to this extended-asm.  Upon return,
   *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
   memory operand.  Similarly, *ALLOWS_REG will be TRUE iff the
   constraint allows the use of a register operand.  And, *IS_INOUT
   will be true if the operand is read-write, i.e., if it is used as
   an input as well as an output.  If *CONSTRAINT_P is not in
   canonical form, it will be made canonical.  (Note that `+' will be
   replaced with `=' as part of this process.)

   Returns TRUE if all went well; FALSE if an error occurred.  */
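/* For example, the constraint "+r" is canonicalized to "=r" with
   *IS_INOUT set, while "=rm" leaves both *ALLOWS_REG and *ALLOWS_MEM
   set.  */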

bool
parse_output_constraint (constraint_p, operand_num, ninputs, noutputs,
			 allows_mem, allows_reg, is_inout)
     const char **constraint_p;
     int operand_num;
     int ninputs;
     int noutputs;
     bool *allows_mem;
     bool *allows_reg;
     bool *is_inout;
{
  const char *constraint = *constraint_p;
  const char *p;

  /* Assume the constraint doesn't allow the use of either a register
     or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Allow the `=' or `+' to not be at the beginning of the string,
     since it wasn't explicitly documented that way, and there is a
     large body of code that puts it last.  Swap the character to
     the front, so as not to uglify any place else.  */
  p = strchr (constraint, '=');
  if (!p)
    p = strchr (constraint, '+');

  /* If the string doesn't contain an `=', issue an error
     message.  */
  if (!p)
    {
      error ("output operand constraint lacks `='");
      return false;
    }

  /* If the constraint begins with `+', then the operand is both read
     from and written to.  */
  *is_inout = (*p == '+');

  /* Canonicalize the output constraint so that it begins with `='.  */
  if (p != constraint || *is_inout)
    {
      char *buf;
      size_t c_len = strlen (constraint);

      if (p != constraint)
	warning ("output constraint `%c' for operand %d is not at the beginning",
		 *p, operand_num);

      /* Make a copy of the constraint.  */
      buf = alloca (c_len + 1);
      strcpy (buf, constraint);
      /* Swap the first character and the `=' or `+'.  */
      buf[p - constraint] = buf[0];
      /* Make sure the first character is an `='.  (Until we do this,
	 it might be a `+'.)  */
      buf[0] = '=';
      /* Replace the constraint with the canonicalized string.  */
      *constraint_p = ggc_alloc_string (buf, c_len);
      constraint = *constraint_p;
    }

  /* Loop through the constraint string.  */
  for (p = constraint + 1; *p; ++p)
    switch (*p)
      {
      case '+':
      case '=':
	error ("operand constraint contains incorrectly positioned '+' or '='");
	return false;

      case '%':
	if (operand_num + 1 == ninputs + noutputs)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '?':  case '!':  case '*':  case '&':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
      case '[':
	error ("matching constraint not valid in output operand");
	return false;

      case '<':  case '>':
	/* ??? Before flow, auto inc/dec insns are not supposed to exist,
	   excepting those that expand_call created.  So match memory
	   and hope.  */
	*allows_mem = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      case 'p': case 'r':
	*allows_reg = true;
	break;

      default:
	if (!ISALPHA (*p))
	  break;
	if (REG_CLASS_FROM_LETTER (*p) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

/* Similar, but for input constraints.  */

static bool
parse_input_constraint (constraint_p, input_num, ninputs, noutputs, ninout,
			constraints, allows_mem, allows_reg)
     const char **constraint_p;
     int input_num;
     int ninputs;
     int noutputs;
     int ninout;
     const char * const * constraints;
     bool *allows_mem;
     bool *allows_reg;
{
  const char *constraint = *constraint_p;
  const char *orig_constraint = constraint;
  size_t c_len = strlen (constraint);
  size_t j;

  /* Assume the constraint doesn't allow the use of either
     a register or memory.  */
  *allows_mem = false;
  *allows_reg = false;

  /* Make sure constraint has neither `=', `+', nor '&'.  */

  for (j = 0; j < c_len; j++)
    switch (constraint[j])
      {
      case '+':  case '=':  case '&':
	if (constraint == orig_constraint)
	  {
	    error ("input operand constraint contains `%c'", constraint[j]);
	    return false;
	  }
	break;

      case '%':
	if (constraint == orig_constraint
	    && input_num + 1 == ninputs - ninout)
	  {
	    error ("`%%' constraint used with last operand");
	    return false;
	  }
	break;

      case 'V':  case 'm':  case 'o':
	*allows_mem = true;
	break;

      case '<':  case '>':
      case '?':  case '!':  case '*':  case '#':
      case 'E':  case 'F':  case 'G':  case 'H':
      case 's':  case 'i':  case 'n':
      case 'I':  case 'J':  case 'K':  case 'L':  case 'M':
      case 'N':  case 'O':  case 'P':  case ',':
	break;

	/* Whether or not a numeric constraint allows a register is
	   decided by the matching constraint, and so there is no need
	   to do anything special with them.  We must handle them in
	   the default case, so that we don't unnecessarily force
	   operands to memory.  */
      case '0':  case '1':  case '2':  case '3':  case '4':
      case '5':  case '6':  case '7':  case '8':  case '9':
	{
	  char *end;
	  unsigned long match;

	  match = strtoul (constraint + j, &end, 10);
	  if (match >= (unsigned long) noutputs)
	    {
	      error ("matching constraint references invalid operand number");
	      return false;
	    }

	  /* Try and find the real constraint for this dup.  Only do this
	     if the matching constraint is the only alternative.  */
	  if (*end == '\0'
	      && (j == 0 || (j == 1 && constraint[0] == '%')))
	    {
	      constraint = constraints[match];
	      *constraint_p = constraint;
	      c_len = strlen (constraint);
	      j = 0;
	      break;
	    }
	  else
	    j = end - constraint;
	}
	/* Fall through.  */

      case 'p':  case 'r':
	*allows_reg = true;
	break;

      case 'g':  case 'X':
	*allows_reg = true;
	*allows_mem = true;
	break;

      default:
	if (! ISALPHA (constraint[j]))
	  {
	    error ("invalid punctuation `%c' in constraint", constraint[j]);
	    return false;
	  }
	if (REG_CLASS_FROM_LETTER (constraint[j]) != NO_REGS)
	  *allows_reg = true;
#ifdef EXTRA_CONSTRAINT
	else
	  {
	    /* Otherwise we can't assume anything about the nature of
	       the constraint except that it isn't purely registers.
	       Treat it like "g" and hope for the best.  */
	    *allows_reg = true;
	    *allows_mem = true;
	  }
#endif
	break;
      }

  return true;
}

/* Generate RTL for an asm statement with arguments.
   STRING is the instruction template.
   OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1581   and a tree list in TREE_PURPOSE which in turn contains a constraint
1582   name in TREE_VALUE (or NULL_TREE) and a constraint string
1583   in TREE_PURPOSE.
1584   CLOBBERS is a list of STRING_CST nodes each naming a hard register
1585   that is clobbered by this insn.
1586
1587   Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1588   Some elements of OUTPUTS may be replaced with trees representing temporary
1589   values.  The caller should copy those temporary values to the originally
1590   specified lvalues.
1591
1592   VOL nonzero means the insn is volatile; don't optimize it.  */
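/* For example (a sketch in GNU C extended asm syntax, with
   hypothetical variables `sum', `a', and `b'),

     asm ("add %2,%0" : "=r" (sum) : "0" (a), "g" (b));

   arrives with one output, two inputs, and the matching constraint
   "0" tying the first input to output operand 0.  */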
1593
1594void
1595expand_asm_operands (string, outputs, inputs, clobbers, vol, filename, line)
1596     tree string, outputs, inputs, clobbers;
1597     int vol;
1598     const char *filename;
1599     int line;
1600{
1601  rtvec argvec, constraintvec;
1602  rtx body;
1603  int ninputs = list_length (inputs);
1604  int noutputs = list_length (outputs);
1605  int ninout;
1606  int nclobbers;
1607  tree tail;
1608  int i;
1609  /* Vector of RTX's of evaluated output operands.  */
1610  rtx *output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1611  int *inout_opnum = (int *) alloca (noutputs * sizeof (int));
1612  rtx *real_output_rtx = (rtx *) alloca (noutputs * sizeof (rtx));
1613  enum machine_mode *inout_mode
1614    = (enum machine_mode *) alloca (noutputs * sizeof (enum machine_mode));
1615  const char **constraints
1616    = (const char **) alloca ((noutputs + ninputs) * sizeof (const char *));
1617  /* The insn we have emitted.  */
1618  rtx insn;
1619  int old_generating_concat_p = generating_concat_p;
1620
1621  /* An ASM with no outputs needs to be treated as volatile, for now.  */
1622  if (noutputs == 0)
1623    vol = 1;
1624
1625  if (! check_operand_nalternatives (outputs, inputs))
1626    return;
1627
1628  if (! check_unique_operand_names (outputs, inputs))
1629    return;
1630
1631  string = resolve_operand_names (string, outputs, inputs, constraints);
1632
1633#ifdef MD_ASM_CLOBBERS
1634  /* Sometimes we wish to automatically clobber registers across an asm.
1635     Case in point is when the i386 backend moved from cc0 to a hard reg --
1636     maintaining source-level compatibility means automatically clobbering
1637     the flags register.  */
1638  MD_ASM_CLOBBERS (clobbers);
1639#endif
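  /* A sketch of what such a definition might look like -- the exact
     strings and the real i386 macro live in the target headers, so
     treat this only as an illustration:

       #define MD_ASM_CLOBBERS(CLOBBERS)			\
	 (CLOBBERS) = tree_cons (NULL_TREE,			\
				 build_string (5, "flags"),	\
				 (CLOBBERS))

     i.e. the hook just prepends extra STRING_CST clobbers, which the
     counting loop below then sees like any user-written clobber.  */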
1640
1641  /* Count the number of meaningful clobbered registers, ignoring what
1642     we would ignore later.  */
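  /* Reminder of decode_reg_name's conventions (documented at its
     definition): a nonnegative result is a hard register number, -1
     means no name was given, -2 an unrecognized name, -3 the special
     name "cc", and -4 the special name "memory".  */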
1643  nclobbers = 0;
1644  for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1645    {
1646      const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1647
1648      i = decode_reg_name (regname);
1649      if (i >= 0 || i == -4)
1650	++nclobbers;
1651      else if (i == -2)
1652	error ("unknown register name `%s' in `asm'", regname);
1653    }
1654
1655  last_expr_type = 0;
1656
1657  /* First pass over inputs and outputs checks validity and sets
1658     mark_addressable if needed.  */
1659
1660  ninout = 0;
1661  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1662    {
1663      tree val = TREE_VALUE (tail);
1664      tree type = TREE_TYPE (val);
1665      const char *constraint;
1666      bool is_inout;
1667      bool allows_reg;
1668      bool allows_mem;
1669
1670      /* If there's an erroneous arg, emit no insn.  */
1671      if (type == error_mark_node)
1672	return;
1673
1674      /* Try to parse the output constraint.  If that fails, there's
1675	 no point in going further.  */
1676      constraint = constraints[i];
1677      if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1678				    &allows_mem, &allows_reg, &is_inout))
1679	return;
1680
1681      if (! allows_reg
1682	  && (allows_mem
1683	      || is_inout
1684	      || (DECL_P (val)
1685		  && GET_CODE (DECL_RTL (val)) == REG
1686		  && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
1687	mark_addressable (val);
1688
1689      if (is_inout)
1690	ninout++;
1691    }
1692
1693  ninputs += ninout;
1694  if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1695    {
1696      error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1697      return;
1698    }
1699
1700  for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1701    {
1702      bool allows_reg, allows_mem;
1703      const char *constraint;
1704
1705      /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1706	 would get VOIDmode and that could cause a crash in reload.  */
1707      if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1708	return;
1709
1710      constraint = constraints[i + noutputs];
1711      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1712				    constraints, &allows_mem, &allows_reg))
1713	return;
1714
1715      if (! allows_reg && allows_mem)
1716	mark_addressable (TREE_VALUE (tail));
1717    }
1718
1719  /* Second pass evaluates arguments.  */
1720
1721  ninout = 0;
1722  for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1723    {
1724      tree val = TREE_VALUE (tail);
1725      tree type = TREE_TYPE (val);
1726      bool is_inout;
1727      bool allows_reg;
1728      bool allows_mem;
1729
1730      if (!parse_output_constraint (&constraints[i], i, ninputs,
1731				    noutputs, &allows_mem, &allows_reg,
1732				    &is_inout))
1733	abort ();
1734
1735      /* If an output operand is not a decl or indirect ref and our constraint
1736	 allows a register, make a temporary to act as an intermediate.
1737	 Make the asm insn write into that, then our caller will copy it to
1738	 the real output operand.  Likewise for promoted variables.  */
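      /* Hypothetical instance: for

	   asm ("bswap %0" : "=r" (a[i]));

	 the array reference cannot receive the asm result directly,
	 so the asm writes a temporary, and TREE_VALUE is replaced so
	 that our caller copies the temporary back into a[i].  */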
1739
1740      generating_concat_p = 0;
1741
1742      real_output_rtx[i] = NULL_RTX;
1743      if ((TREE_CODE (val) == INDIRECT_REF
1744	   && allows_mem)
1745	  || (DECL_P (val)
1746	      && (allows_mem || GET_CODE (DECL_RTL (val)) == REG)
1747	      && ! (GET_CODE (DECL_RTL (val)) == REG
1748		    && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
1749	  || ! allows_reg
1750	  || is_inout)
1751	{
1752	  output_rtx[i] = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
1753
1754	  if (! allows_reg && GET_CODE (output_rtx[i]) != MEM)
1755	    error ("output number %d not directly addressable", i);
1756	  if ((! allows_mem && GET_CODE (output_rtx[i]) == MEM)
1757	      || GET_CODE (output_rtx[i]) == CONCAT)
1758	    {
1759	      real_output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1760	      output_rtx[i] = gen_reg_rtx (GET_MODE (output_rtx[i]));
1761	      if (is_inout)
1762		emit_move_insn (output_rtx[i], real_output_rtx[i]);
1763	    }
1764	}
1765      else
1766	{
1767	  output_rtx[i] = assign_temp (type, 0, 0, 1);
1768	  TREE_VALUE (tail) = make_tree (type, output_rtx[i]);
1769	}
1770
1771      generating_concat_p = old_generating_concat_p;
1772
1773      if (is_inout)
1774	{
1775	  inout_mode[ninout] = TYPE_MODE (type);
1776	  inout_opnum[ninout++] = i;
1777	}
1778    }
1779
1780  /* Make vectors for the expression-rtx, constraint strings,
1781     and named operands.  */
1782
1783  argvec = rtvec_alloc (ninputs);
1784  constraintvec = rtvec_alloc (ninputs);
1785
1786  body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1787				: GET_MODE (output_rtx[0])),
1788			       TREE_STRING_POINTER (string),
1789			       empty_string, 0, argvec, constraintvec,
1790			       filename, line);
1791
1792  MEM_VOLATILE_P (body) = vol;
1793
1794  /* Eval the inputs and put them into ARGVEC.
1795     Put their constraints into ASM_INPUTs and store in CONSTRAINTS.  */
1796
1797  for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
1798    {
1799      bool allows_reg, allows_mem;
1800      const char *constraint;
1801      tree val, type;
1802      rtx op;
1803
1804      constraint = constraints[i + noutputs];
1805      if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1806				    constraints, &allows_mem, &allows_reg))
1807	abort ();
1808
1809      generating_concat_p = 0;
1810
1811      val = TREE_VALUE (tail);
1812      type = TREE_TYPE (val);
1813      op = expand_expr (val, NULL_RTX, VOIDmode, 0);
1814
1815      /* Never pass a CONCAT to an ASM.  */
1816      if (GET_CODE (op) == CONCAT)
1817	op = force_reg (GET_MODE (op), op);
1818
1819      if (asm_operand_ok (op, constraint) <= 0)
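      /* Example of the fixups below: for an input such as "m" (42),
	 the constant fails asm_operand_ok against "m"; ALLOWS_REG is
	 false and ALLOWS_MEM true, so force_const_mem spills 42 into
	 the constant pool and the resulting MEM is used instead.  */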
1820	{
1821	  if (allows_reg)
1822	    op = force_reg (TYPE_MODE (type), op);
1823	  else if (!allows_mem)
1824	    warning ("asm operand %d probably doesn't match constraints",
1825		     i + noutputs);
1826	  else if (CONSTANT_P (op))
1827	    op = force_const_mem (TYPE_MODE (type), op);
1828	  else if (GET_CODE (op) == REG
1829		   || GET_CODE (op) == SUBREG
1830		   || GET_CODE (op) == ADDRESSOF
1831		   || GET_CODE (op) == CONCAT)
1832	    {
1833	      tree qual_type = build_qualified_type (type,
1834						     (TYPE_QUALS (type)
1835						      | TYPE_QUAL_CONST));
1836	      rtx memloc = assign_temp (qual_type, 1, 1, 1);
1837
1838	      emit_move_insn (memloc, op);
1839	      op = memloc;
1840	    }
1841
1842	  else if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op))
1843	    {
1844	      /* We won't recognize volatile memory as a memory_operand
1845		 at this point.  Ignore it.  */
1846	    }
1847	  else if (queued_subexp_p (op))
1848	    ;
1849	  else
1850	    /* ??? Leave this only until we have experience with what
1851	       happens in combine and elsewhere when constraints are
1852	       not satisfied.  */
1853	    warning ("asm operand %d probably doesn't match constraints",
1854		     i + noutputs);
1855	}
1856
1857      generating_concat_p = old_generating_concat_p;
1858      ASM_OPERANDS_INPUT (body, i) = op;
1859
1860      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
1861	= gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
1862    }
1863
1864  /* Protect all the operands from the queue now that they have all been
1865     evaluated.  */
1866
1867  generating_concat_p = 0;
1868
1869  for (i = 0; i < ninputs - ninout; i++)
1870    ASM_OPERANDS_INPUT (body, i)
1871      = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
1872
1873  for (i = 0; i < noutputs; i++)
1874    output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1875
1876  /* For in-out operands, copy output rtx to input rtx.  */
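  /* E.g. an operand written "+r" (x) acts as output 0 and as a hidden
     input; the synthesized input receives output 0's rtx together
     with a textual matching constraint such as "0", built below.  */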
1877  for (i = 0; i < ninout; i++)
1878    {
1879      int j = inout_opnum[i];
1880      char buffer[16];
1881
1882      ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
1883	= output_rtx[j];
1884
1885      sprintf (buffer, "%d", j);
1886      ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
1887	= gen_rtx_ASM_INPUT (inout_mode[i], ggc_alloc_string (buffer, -1));
1888    }
1889
1890  generating_concat_p = old_generating_concat_p;
1891
1892  /* Now, for each output, construct an rtx
1893     (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1894			       ARGVEC CONSTRAINTS OPNAMES))
1895     If there is more than one, put them inside a PARALLEL.  */
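  /* Rough shape of the multi-output case (constraint letters here are
     placeholders only):

       (parallel [(set (reg A) (asm_operands TEMPLATE "=r" 0 ARGVEC ...))
		  (set (reg B) (asm_operands TEMPLATE "=m" 1 ARGVEC ...))
		  (clobber (mem:BLK (scratch)))])  */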
1896
1897  if (noutputs == 1 && nclobbers == 0)
1898    {
1899      ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
1900      insn = emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
1901    }
1902
1903  else if (noutputs == 0 && nclobbers == 0)
1904    {
1905      /* No output operands: put in a raw ASM_OPERANDS rtx.  */
1906      insn = emit_insn (body);
1907    }
1908
1909  else
1910    {
1911      rtx obody = body;
1912      int num = noutputs;
1913
1914      if (num == 0)
1915	num = 1;
1916
1917      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
1918
1919      /* For each output operand, store a SET.  */
1920      for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1921	{
1922	  XVECEXP (body, 0, i)
1923	    = gen_rtx_SET (VOIDmode,
1924			   output_rtx[i],
1925			   gen_rtx_ASM_OPERANDS
1926			   (GET_MODE (output_rtx[i]),
1927			    TREE_STRING_POINTER (string),
1928			    constraints[i], i, argvec, constraintvec,
1929			    filename, line));
1930
1931	  MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1932	}
1933
1934      /* If there are no outputs (but there are some clobbers)
1935	 store the bare ASM_OPERANDS into the PARALLEL.  */
1936
1937      if (i == 0)
1938	XVECEXP (body, 0, i++) = obody;
1939
1940      /* Store (clobber REG) for each clobbered register specified.  */
1941
1942      for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1943	{
1944	  const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
1945	  int j = decode_reg_name (regname);
1946
1947	  if (j < 0)
1948	    {
1949	      if (j == -3)	/* `cc', which is not a register */
1950		continue;
1951
1952	      if (j == -4)	/* `memory', don't cache memory across asm */
1953		{
1954		  XVECEXP (body, 0, i++)
1955		    = gen_rtx_CLOBBER (VOIDmode,
1956				       gen_rtx_MEM
1957				       (BLKmode,
1958					gen_rtx_SCRATCH (VOIDmode)));
1959		  continue;
1960		}
1961
1962	      /* Ignore unknown register, error already signaled.  */
1963	      continue;
1964	    }
1965
1966	  /* Use QImode since that's guaranteed to clobber just one reg.  */
1967	  XVECEXP (body, 0, i++)
1968	    = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (QImode, j));
1969	}
1970
1971      insn = emit_insn (body);
1972    }
1973
1974  /* For any outputs that needed reloading into registers, spill them
1975     back to where they belong.  */
1976  for (i = 0; i < noutputs; ++i)
1977    if (real_output_rtx[i])
1978      emit_move_insn (real_output_rtx[i], output_rtx[i]);
1979
1980  free_temp_slots ();
1981}
1982
1983/* A subroutine of expand_asm_operands.  Check that all operands have
1984   the same number of alternatives.  Return true if so.  */
1985
1986static bool
1987check_operand_nalternatives (outputs, inputs)
1988     tree outputs, inputs;
1989{
1990  if (outputs || inputs)
1991    {
1992      tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1993      int nalternatives
1994	= n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1995      tree next = inputs;
1996
1997      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1998	{
1999	  error ("too many alternatives in `asm'");
2000	  return false;
2001	}
2002
2003      tmp = outputs;
2004      while (tmp)
2005	{
2006	  const char *constraint
2007	    = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
2008
2009	  if (n_occurrences (',', constraint) != nalternatives)
2010	    {
2011	      error ("operand constraints for `asm' differ in number of alternatives");
2012	      return false;
2013	    }
2014
2015	  if (TREE_CHAIN (tmp))
2016	    tmp = TREE_CHAIN (tmp);
2017	  else
2018	    tmp = next, next = 0;
2019	}
2020    }
2021
2022  return true;
2023}
2024
2025/* A subroutine of expand_asm_operands.  Check that all operand names
2026   are unique.  Return true if so.  We rely on the fact that these names
2027   are identifiers, and so have been canonicalized by get_identifier,
2028   so all we need are pointer comparisons.  */
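/* For instance, the (erroneous) statement

     asm ("" : [x] "=r" (a), [x] "=r" (b));

   must be rejected here, since a template reference "%[x]" would
   otherwise be ambiguous.  */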
2029
2030static bool
2031check_unique_operand_names (outputs, inputs)
2032     tree outputs, inputs;
2033{
2034  tree i, j;
2035
2036  for (i = outputs; i ; i = TREE_CHAIN (i))
2037    {
2038      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2039      if (! i_name)
2040	continue;
2041
2042      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2043	if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2044	  goto failure;
2045    }
2046
2047  for (i = inputs; i ; i = TREE_CHAIN (i))
2048    {
2049      tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
2050      if (! i_name)
2051	continue;
2052
2053      for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
2054	if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2055	  goto failure;
2056      for (j = outputs; j ; j = TREE_CHAIN (j))
2057	if (i_name == TREE_PURPOSE (TREE_PURPOSE (j)))
2058	  goto failure;
2059    }
2060
2061  return true;
2062
2063 failure:
2064  error ("duplicate asm operand name '%s'",
2065	 IDENTIFIER_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
2066  return false;
2067}
2068
2069/* A subroutine of expand_asm_operands.  Resolve the names of the operands
2070   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
2071   STRING and in the constraints to those numbers.  */
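/* Illustration: with output 0 named [len] and input 1 named [src],
   the template "movl %[src], %[len]" is rewritten to "movl %1, %0",
   and a matching constraint written "[len]" becomes "0".  */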
2072
2073static tree
2074resolve_operand_names (string, outputs, inputs, pconstraints)
2075     tree string;
2076     tree outputs, inputs;
2077     const char **pconstraints;
2078{
2079  char *buffer = xstrdup (TREE_STRING_POINTER (string));
2080  char *p;
2081  tree t;
2082
2083  /* Assume that we will not need extra space to perform the substitution.
2084     This is because we get to remove '[' and ']', which means we cannot have
2085     a problem until we have more than 999 operands.  */
2086
2087  p = buffer;
2088  while ((p = strchr (p, '%')) != NULL)
2089    {
2090      if (p[1] == '[')
2091	p += 1;
2092      else if (ISALPHA (p[1]) && p[2] == '[')
2093	p += 2;
2094      else
2095	{
2096	  p += 1;
2097	  continue;
2098	}
2099
2100      p = resolve_operand_name_1 (p, outputs, inputs);
2101    }
2102
2103  string = build_string (strlen (buffer), buffer);
2104  free (buffer);
2105
2106  /* Collect output constraints here because it's convenient.
2107     There should be no named operands here; this is verified
2108     in expand_asm_operands.  */
2109  for (t = outputs; t ; t = TREE_CHAIN (t), pconstraints++)
2110    *pconstraints = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2111
2112  /* Substitute [<name>] in input constraint strings.  */
2113  for (t = inputs; t ; t = TREE_CHAIN (t), pconstraints++)
2114    {
2115      const char *c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2116      if (strchr (c, '[') == NULL)
2117	*pconstraints = c;
2118      else
2119	{
2120	  p = buffer = xstrdup (c);
2121	  while ((p = strchr (p, '[')) != NULL)
2122	    p = resolve_operand_name_1 (p, outputs, inputs);
2123
2124	  *pconstraints = ggc_alloc_string (buffer, -1);
2125	  free (buffer);
2126	}
2127    }
2128
2129  return string;
2130}
2131
2132/* A subroutine of resolve_operand_names.  P points to the '[' for a
2133   potential named operand of the form [<name>].  In place, replace
2134   the name and brackets with a number.  Return a pointer to the
2135   balance of the string after substitution.  */
2136
2137static char *
2138resolve_operand_name_1 (p, outputs, inputs)
2139     char *p;
2140     tree outputs, inputs;
2141{
2142  char *q;
2143  int op;
2144  tree t;
2145  size_t len;
2146
2147  /* Collect the operand name.  */
2148  q = strchr (p, ']');
2149  if (!q)
2150    {
2151      error ("missing close brace for named operand");
2152      return strchr (p, '\0');
2153    }
2154  len = q - p - 1;
2155
2156  /* Resolve the name to a number.  */
2157  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
2158    {
2159      tree id = TREE_PURPOSE (TREE_PURPOSE (t));
2160      if (id)
2161	{
2162	  const char *c = IDENTIFIER_POINTER (id);
2163	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2164	    goto found;
2165	}
2166    }
2167  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
2168    {
2169      tree id = TREE_PURPOSE (TREE_PURPOSE (t));
2170      if (id)
2171	{
2172	  const char *c = IDENTIFIER_POINTER (id);
2173	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
2174	    goto found;
2175	}
2176    }
2177
2178  *q = '\0';
2179  error ("undefined named operand '%s'", p + 1);
2180  op = 0;
2181 found:
2182
2183  /* Replace the name with the number.  Unfortunately, not all libraries
2184     get the return value of sprintf correct, so search for the end of the
2185     generated string by hand.  */
2186  sprintf (p, "%d", op);
2187  p = strchr (p, '\0');
2188
2189  /* Verify the assumption above that no extra buffer space is needed.  */
2190  if (p > q)
2191    abort ();
2192
2193  /* Shift the rest of the buffer down to fill the gap.  */
2194  memmove (p, q + 1, strlen (q + 1) + 1);
2195
2196  return p;
2197}
2198
2199/* Generate RTL to evaluate the expression EXP
2200   and remember it in case this is the VALUE in a ({... VALUE; }) construct.
2201   Provided just for backward compatibility.  expand_expr_stmt_value()
2202   should be used for new code.  */
2203
2204void
2205expand_expr_stmt (exp)
2206     tree exp;
2207{
2208  expand_expr_stmt_value (exp, -1, 1);
2209}
2210
2211/* Generate RTL to evaluate the expression EXP.  WANT_VALUE tells
2212   whether to save the value of the expression (1), discard it (0), or
2213   let expr_stmts_for_value decide (-1); -1 is deprecated.  MAYBE_LAST is
2214   nonzero if this might be the last statement of a ({...}) construct.  */
2215
2216void
2217expand_expr_stmt_value (exp, want_value, maybe_last)
2218     tree exp;
2219     int want_value, maybe_last;
2220{
2221  rtx value;
2222  tree type;
2223
2224  if (want_value == -1)
2225    want_value = expr_stmts_for_value != 0;
2226
2227  /* If -W, warn about statements with no side effects,
2228     except for an explicit cast to void (e.g. for assert()), and
2229     except for last statement in ({...}) where they may be useful.  */
2230  if (! want_value
2231      && (expr_stmts_for_value == 0 || ! maybe_last)
2232      && exp != error_mark_node)
2233    {
2234      if (! TREE_SIDE_EFFECTS (exp))
2235	{
2236	  if ((extra_warnings || warn_unused_value)
2237	      && !(TREE_CODE (exp) == CONVERT_EXPR
2238		   && VOID_TYPE_P (TREE_TYPE (exp))))
2239	    warning_with_file_and_line (emit_filename, emit_lineno,
2240				        "statement with no effect");
2241	}
2242      else if (warn_unused_value)
2243	warn_if_unused_value (exp);
2244    }
2245
2246  /* If EXP is of function type and we are expanding statements for
2247     value, convert it to pointer-to-function.  */
2248  if (want_value && TREE_CODE (TREE_TYPE (exp)) == FUNCTION_TYPE)
2249    exp = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (exp)), exp);
2250
2251  /* The call to `expand_expr' could cause last_expr_type and
2252     last_expr_value to get reset.  Therefore, we set last_expr_value
2253     and last_expr_type *after* calling expand_expr.  */
2254  value = expand_expr (exp, want_value ? NULL_RTX : const0_rtx,
2255		       VOIDmode, 0);
2256  type = TREE_TYPE (exp);
2257
2258  /* If all we do is reference a volatile value in memory,
2259     copy it to a register to be sure it is actually touched.  */
2260  if (value && GET_CODE (value) == MEM && TREE_THIS_VOLATILE (exp))
2261    {
2262      if (TYPE_MODE (type) == VOIDmode)
2263	;
2264      else if (TYPE_MODE (type) != BLKmode)
2265	value = copy_to_reg (value);
2266      else
2267	{
2268	  rtx lab = gen_label_rtx ();
2269
2270	  /* Compare the value with itself to reference it.  */
2271	  emit_cmp_and_jump_insns (value, value, EQ,
2272				   expand_expr (TYPE_SIZE (type),
2273						NULL_RTX, VOIDmode, 0),
2274				   BLKmode, 0, lab);
2275	  emit_label (lab);
2276	}
2277    }
2278
2279  /* If this expression is part of a ({...}) and is in memory, we may have
2280     to preserve temporaries.  */
2281  preserve_temp_slots (value);
2282
2283  /* Free any temporaries used to evaluate this expression.  Any temporary
2284     used as a result of this expression will already have been preserved
2285     above.  */
2286  free_temp_slots ();
2287
2288  if (want_value)
2289    {
2290      last_expr_value = value;
2291      last_expr_type = type;
2292    }
2293
2294  emit_queue ();
2295}
2296
2297/* Warn if EXP contains any computations whose results are not used.
2298   Return 1 if a warning is printed; 0 otherwise.  */
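/* For example, the statement `x + 1;' merits the warning, while
   `(void) (x + 1);', `x++;', and `f ();' all land in one of the
   silent cases below.  */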
2299
2300int
2301warn_if_unused_value (exp)
2302     tree exp;
2303{
2304  if (TREE_USED (exp))
2305    return 0;
2306
2307  /* Don't warn about void constructs.  This includes casting to void,
2308     void function calls, and statement expressions with a final cast
2309     to void.  */
2310  if (VOID_TYPE_P (TREE_TYPE (exp)))
2311    return 0;
2312
2313  switch (TREE_CODE (exp))
2314    {
2315    case PREINCREMENT_EXPR:
2316    case POSTINCREMENT_EXPR:
2317    case PREDECREMENT_EXPR:
2318    case POSTDECREMENT_EXPR:
2319    case MODIFY_EXPR:
2320    case INIT_EXPR:
2321    case TARGET_EXPR:
2322    case CALL_EXPR:
2323    case METHOD_CALL_EXPR:
2324    case RTL_EXPR:
2325    case TRY_CATCH_EXPR:
2326    case WITH_CLEANUP_EXPR:
2327    case EXIT_EXPR:
2328      return 0;
2329
2330    case BIND_EXPR:
2331      /* For a binding, warn if no side effect within it.  */
2332      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2333
2334    case SAVE_EXPR:
2335      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2336
2337    case TRUTH_ORIF_EXPR:
2338    case TRUTH_ANDIF_EXPR:
2339      /* In && or ||, warn if 2nd operand has no side effect.  */
2340      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2341
2342    case COMPOUND_EXPR:
2343      if (TREE_NO_UNUSED_WARNING (exp))
2344	return 0;
2345      if (warn_if_unused_value (TREE_OPERAND (exp, 0)))
2346	return 1;
2347      /* Let people do `(foo (), 0)' without a warning.  */
2348      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
2349	return 0;
2350      return warn_if_unused_value (TREE_OPERAND (exp, 1));
2351
2352    case NOP_EXPR:
2353    case CONVERT_EXPR:
2354    case NON_LVALUE_EXPR:
2355      /* Don't warn about conversions not explicit in the user's program.  */
2356      if (TREE_NO_UNUSED_WARNING (exp))
2357	return 0;
2358      /* Assignment to a cast usually results in a cast of a modify.
2359	 Don't complain about that.  There can be an arbitrary number of
2360	 casts before the modify, so we must loop until we find the first
2361	 non-cast expression and then test to see if that is a modify.  */
2362      {
2363	tree tem = TREE_OPERAND (exp, 0);
2364
2365	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
2366	  tem = TREE_OPERAND (tem, 0);
2367
2368	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
2369	    || TREE_CODE (tem) == CALL_EXPR)
2370	  return 0;
2371      }
2372      goto maybe_warn;
2373
2374    case INDIRECT_REF:
2375      /* Don't warn about automatic dereferencing of references, since
2376	 the user cannot control it.  */
2377      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
2378	return warn_if_unused_value (TREE_OPERAND (exp, 0));
2379      /* Fall through.  */
2380
2381    default:
2382      /* Referencing a volatile value is a side effect, so don't warn.  */
2383      if ((DECL_P (exp)
2384	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
2385	  && TREE_THIS_VOLATILE (exp))
2386	return 0;
2387
2388      /* If this is an expression which has no operands, there is no value
2389	 to be unused.  There are no such language-independent codes,
2390	 but front ends may define such.  */
2391      if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
2392	  && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
2393	return 0;
2394
2395    maybe_warn:
2396      /* If this is an expression with side effects, don't warn.  */
2397      if (TREE_SIDE_EFFECTS (exp))
2398	return 0;
2399
2400      warning_with_file_and_line (emit_filename, emit_lineno,
2401				  "value computed is not used");
2402      return 1;
2403    }
2404}
2405
2406/* Clear out the memory of the last expression evaluated.  */
2407
2408void
2409clear_last_expr ()
2410{
2411  last_expr_type = 0;
2412}
2413
2414/* Begin a statement-expression, i.e., a series of statements which
2415   may return a value.  Return the RTL_EXPR for this statement expr.
2416   The caller must save that value and pass it to
2417   expand_end_stmt_expr.  If HAS_SCOPE is nonzero, temporaries created
2418   in the statement-expression are deallocated at the end of the
2419   expression.  */
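/* The expected calling sequence, sketched:

     tree t = expand_start_stmt_expr (1);
     ... expand the component statements ...
     t = expand_end_stmt_expr (t);

   after which T stands for the value of the whole ({...}) construct.  */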
2420
2421tree
2422expand_start_stmt_expr (has_scope)
2423     int has_scope;
2424{
2425  tree t;
2426
2427  /* Make the RTL_EXPR node temporary, not momentary,
2428     so that rtl_expr_chain doesn't become garbage.  */
2429  t = make_node (RTL_EXPR);
2430  do_pending_stack_adjust ();
2431  if (has_scope)
2432    start_sequence_for_rtl_expr (t);
2433  else
2434    start_sequence ();
2435  NO_DEFER_POP;
2436  expr_stmts_for_value++;
2437  last_expr_value = NULL_RTX;
2438  return t;
2439}
2440
2441/* Restore the previous state at the end of a statement that returns a value.
2442   Returns a tree node representing the statement's value and the
2443   insns to compute the value.
2444
2445   The nodes of that expression have been freed by now, so we cannot use them.
2446   But we don't want to do that anyway; the expression has already been
2447   evaluated and now we just want to use the value.  So generate a RTL_EXPR
2448   with the proper type and RTL value.
2449
2450   If the last substatement was not an expression,
2451   return something with type `void'.  */
2452
2453tree
2454expand_end_stmt_expr (t)
2455     tree t;
2456{
2457  OK_DEFER_POP;
2458
2459  if (! last_expr_value || ! last_expr_type)
2460    {
2461      last_expr_value = const0_rtx;
2462      last_expr_type = void_type_node;
2463    }
2464  else if (GET_CODE (last_expr_value) != REG && ! CONSTANT_P (last_expr_value))
2465    /* Remove any possible QUEUED.  */
2466    last_expr_value = protect_from_queue (last_expr_value, 0);
2467
2468  emit_queue ();
2469
2470  TREE_TYPE (t) = last_expr_type;
2471  RTL_EXPR_RTL (t) = last_expr_value;
2472  RTL_EXPR_SEQUENCE (t) = get_insns ();
2473
2474  rtl_expr_chain = tree_cons (NULL_TREE, t, rtl_expr_chain);
2475
2476  end_sequence ();
2477
2478  /* Don't consider deleting this expr or containing exprs at tree level.  */
2479  TREE_SIDE_EFFECTS (t) = 1;
2480  /* Propagate volatility of the actual RTL expr.  */
2481  TREE_THIS_VOLATILE (t) = volatile_refs_p (last_expr_value);
2482
2483  last_expr_type = 0;
2484  expr_stmts_for_value--;
2485
2486  return t;
2487}
2488
2489/* Generate RTL for the start of an if-then.  COND is the expression
2490   whose truth should be tested.
2491
2492   If EXITFLAG is nonzero, this conditional is visible to
2493   `exit_something'.  */
2494
2495void
2496expand_start_cond (cond, exitflag)
2497     tree cond;
2498     int exitflag;
2499{
2500  struct nesting *thiscond = ALLOC_NESTING ();
2501
2502  /* Make an entry on cond_stack for the cond we are entering.  */
2503
2504  thiscond->next = cond_stack;
2505  thiscond->all = nesting_stack;
2506  thiscond->depth = ++nesting_depth;
2507  thiscond->data.cond.next_label = gen_label_rtx ();
2508  /* Before we encounter an `else', we don't need a separate exit label
2509     unless there are supposed to be exit statements
2510     to exit this conditional.  */
2511  thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2512  thiscond->data.cond.endif_label = thiscond->exit_label;
2513  cond_stack = thiscond;
2514  nesting_stack = thiscond;
2515
2516  do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
2517}
2518
2519/* Generate RTL between then-clause and the elseif-clause
2520   of an if-then-elseif-....  */
2521
2522void
2523expand_start_elseif (cond)
2524     tree cond;
2525{
2526  if (cond_stack->data.cond.endif_label == 0)
2527    cond_stack->data.cond.endif_label = gen_label_rtx ();
2528  emit_jump (cond_stack->data.cond.endif_label);
2529  emit_label (cond_stack->data.cond.next_label);
2530  cond_stack->data.cond.next_label = gen_label_rtx ();
2531  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2532}
2533
2534/* Generate RTL between the then-clause and the else-clause
2535   of an if-then-else.  */
2536
2537void
2538expand_start_else ()
2539{
2540  if (cond_stack->data.cond.endif_label == 0)
2541    cond_stack->data.cond.endif_label = gen_label_rtx ();
2542
2543  emit_jump (cond_stack->data.cond.endif_label);
2544  emit_label (cond_stack->data.cond.next_label);
2545  cond_stack->data.cond.next_label = 0;  /* No more _else or _elseif calls.  */
2546}
2547
2548/* After calling expand_start_else, turn this "else" into an "else if"
2549   by providing another condition.  */
2550
2551void
2552expand_elseif (cond)
2553     tree cond;
2554{
2555  cond_stack->data.cond.next_label = gen_label_rtx ();
2556  do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2557}
2558
2559/* Generate RTL for the end of an if-then.
2560   Pop the record for it off of cond_stack.  */
2561
2562void
2563expand_end_cond ()
2564{
2565  struct nesting *thiscond = cond_stack;
2566
2567  do_pending_stack_adjust ();
2568  if (thiscond->data.cond.next_label)
2569    emit_label (thiscond->data.cond.next_label);
2570  if (thiscond->data.cond.endif_label)
2571    emit_label (thiscond->data.cond.endif_label);
2572
2573  POPSTACK (cond_stack);
2574  last_expr_type = 0;
2575}
2576
2577/* Generate RTL for the start of a loop.  EXIT_FLAG is nonzero if this
2578   loop should be exited by `exit_something'.  This is a loop for which
2579   `expand_continue' will jump to the top of the loop.
2580
2581   Make an entry on loop_stack to record the labels associated with
2582   this loop.  */
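/* A sketch of typical front-end usage for `while (cond) body':

     expand_start_loop (1);
     expand_exit_loop_top_cond (0, cond);
     ... expand body ...
     expand_end_loop ();

   A `for' loop would instead call
   expand_start_loop_continue_elsewhere and mark its increment code
   with expand_loop_continue_here, so that `continue' jumps there.  */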
2583
2584struct nesting *
2585expand_start_loop (exit_flag)
2586     int exit_flag;
2587{
2588  struct nesting *thisloop = ALLOC_NESTING ();
2589
2590  /* Make an entry on loop_stack for the loop we are entering.  */
2591
2592  thisloop->next = loop_stack;
2593  thisloop->all = nesting_stack;
2594  thisloop->depth = ++nesting_depth;
2595  thisloop->data.loop.start_label = gen_label_rtx ();
2596  thisloop->data.loop.end_label = gen_label_rtx ();
2597  thisloop->data.loop.alt_end_label = 0;
2598  thisloop->data.loop.continue_label = thisloop->data.loop.start_label;
2599  thisloop->exit_label = exit_flag ? thisloop->data.loop.end_label : 0;
2600  loop_stack = thisloop;
2601  nesting_stack = thisloop;
2602
2603  do_pending_stack_adjust ();
2604  emit_queue ();
2605  emit_note (NULL, NOTE_INSN_LOOP_BEG);
2606  emit_label (thisloop->data.loop.start_label);
2607
2608  return thisloop;
2609}
2610
2611/* Like expand_start_loop but for a loop where the continuation point
2612   (for expand_continue_loop) will be specified explicitly.  */
2613
2614struct nesting *
2615expand_start_loop_continue_elsewhere (exit_flag)
2616     int exit_flag;
2617{
2618  struct nesting *thisloop = expand_start_loop (exit_flag);
2619  loop_stack->data.loop.continue_label = gen_label_rtx ();
2620  return thisloop;
2621}
2622
2623/* Begin a null, aka do { } while (0) "loop".  But since the contents
2624   of said loop can still contain a break, we must frob the loop nest.  */
2625
2626struct nesting *
2627expand_start_null_loop ()
2628{
2629  struct nesting *thisloop = ALLOC_NESTING ();
2630
2631  /* Make an entry on loop_stack for the loop we are entering.  */
2632
2633  thisloop->next = loop_stack;
2634  thisloop->all = nesting_stack;
2635  thisloop->depth = ++nesting_depth;
2636  thisloop->data.loop.start_label = emit_note (NULL, NOTE_INSN_DELETED);
2637  thisloop->data.loop.end_label = gen_label_rtx ();
2638  thisloop->data.loop.alt_end_label = NULL_RTX;
2639  thisloop->data.loop.continue_label = thisloop->data.loop.end_label;
2640  thisloop->exit_label = thisloop->data.loop.end_label;
2641  loop_stack = thisloop;
2642  nesting_stack = thisloop;
2643
2644  return thisloop;
2645}
2646
2647/* Specify the continuation point for a loop started with
2648   expand_start_loop_continue_elsewhere.
2649   Use this at the point in the code to which a continue statement
2650   should jump.  */
2651
2652void
2653expand_loop_continue_here ()
2654{
2655  do_pending_stack_adjust ();
2656  emit_note (NULL, NOTE_INSN_LOOP_CONT);
2657  emit_label (loop_stack->data.loop.continue_label);
2658}
2659
2660/* Finish a loop.  Generate a jump back to the top and the loop-exit label.
2661   Pop the block off of loop_stack.  */
2662
2663void
2664expand_end_loop ()
2665{
2666  rtx start_label = loop_stack->data.loop.start_label;
2667  rtx etc_note;
2668  int eh_regions, debug_blocks;
2669
2670  /* Mark the continue-point at the top of the loop if none elsewhere.  */
2671  if (start_label == loop_stack->data.loop.continue_label)
2672    emit_note_before (NOTE_INSN_LOOP_CONT, start_label);
2673
2674  do_pending_stack_adjust ();
2675
2676  /* If the loop starts with a loop exit, roll that to the end where
2677     it will optimize together with the jump back.
2678
2679     If the loop presently looks like this (in pseudo-C):
2680
2681	LOOP_BEG
2682	start_label:
2683	  if (test) goto end_label;
2684	LOOP_END_TOP_COND
2685	  body;
2686	  goto start_label;
2687	end_label:
2688
2689     transform it to look like:
2690
2691	LOOP_BEG
2692	  goto start_label;
2693	top_label:
2694	  body;
2695	start_label:
2696	  if (test) goto end_label;
2697	  goto top_label;
2698	end_label:
2699
2700     We rely on the presence of NOTE_INSN_LOOP_END_TOP_COND to mark
2701     the end of the entry conditional.  Without this, our lexical scan
2702     can't tell the difference between an entry conditional and a
2703     body conditional that exits the loop.  Mistaking the two means
2704     that we can misplace the NOTE_INSN_LOOP_CONT note, which can
2705     screw up loop unrolling.
2706
2707     Things will be oh so much better when loop optimization is done
2708     off of a proper control flow graph...  */
2709
2710  /* Scan insns from the top of the loop looking for the END_TOP_COND note.  */
2711
2712  eh_regions = debug_blocks = 0;
2713  for (etc_note = start_label; etc_note ; etc_note = NEXT_INSN (etc_note))
2714    if (GET_CODE (etc_note) == NOTE)
2715      {
2716	if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_END_TOP_COND)
2717	  break;
2718
2719	/* We must not walk into a nested loop.  */
2720	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_LOOP_BEG)
2721	  {
2722	    etc_note = NULL_RTX;
2723	    break;
2724	  }
2725
2726	/* At the same time, scan for EH region notes, as we don't want
2727	   to scrog region nesting.  This shouldn't happen, but...  */
2728	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_BEG)
2729	  eh_regions++;
2730	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_EH_REGION_END)
2731	  {
2732	    if (--eh_regions < 0)
2733	      /* We've come to the end of an EH region, but never saw the
2734		 beginning of that region.  That means that an EH region
2735		 begins before the top of the loop, and ends in the middle
2736		 of it.  The existence of such a situation violates a basic
2737		 assumption in this code, since that would imply that even
2738		 when EH_REGIONS is zero, we might move code out of an
2739		 exception region.  */
2740	      abort ();
2741	  }
2742
2743	/* Likewise for debug scopes.  In this case we'll either (1) move
2744	   all of the notes if they are properly nested or (2) leave the
2745	   notes alone and only rotate the loop at high optimization
2746	   levels when we expect to scrog debug info.  */
2747	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_BEG)
2748	  debug_blocks++;
2749	else if (NOTE_LINE_NUMBER (etc_note) == NOTE_INSN_BLOCK_END)
2750	  debug_blocks--;
2751      }
2752
2753  if (etc_note
2754      && optimize
2755      && eh_regions == 0
2756      && (debug_blocks == 0 || optimize >= 2)
2757      && NEXT_INSN (etc_note) != NULL_RTX
2758      && ! any_condjump_p (get_last_insn ()))
2759    {
2760      /* We found one.  Move everything from START to ETC to the end
2761	 of the loop, and add a jump from the top of the loop.  */
2762      rtx top_label = gen_label_rtx ();
2763      rtx start_move = start_label;
2764
2765      /* If the start label is preceded by a NOTE_INSN_LOOP_CONT note,
2766	 then we want to move this note also.  */
2767      if (GET_CODE (PREV_INSN (start_move)) == NOTE
2768	  && NOTE_LINE_NUMBER (PREV_INSN (start_move)) == NOTE_INSN_LOOP_CONT)
2769	start_move = PREV_INSN (start_move);
2770
2771      emit_label_before (top_label, start_move);
2772
2773      /* Actually move the insns.  If the debug scopes are nested, we
2774	 can move everything at once.  Otherwise we have to move them
2775	 one by one and squeeze out the block notes.  */
2776      if (debug_blocks == 0)
2777	reorder_insns (start_move, etc_note, get_last_insn ());
2778      else
2779	{
2780	  rtx insn, next_insn;
2781	  for (insn = start_move; insn; insn = next_insn)
2782	    {
2783	      /* Figure out which insn comes after this one.  We have
2784		 to do this before we move INSN.  */
2785	      next_insn = (insn == etc_note ? NULL : NEXT_INSN (insn));
2786
2787	      if (GET_CODE (insn) == NOTE
2788		  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
2789		      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
2790		continue;
2791
2792	      reorder_insns (insn, insn, get_last_insn ());
2793	    }
2794	}
2795
2796      /* Add the jump from the top of the loop.  */
2797      emit_jump_insn_before (gen_jump (start_label), top_label);
2798      emit_barrier_before (top_label);
2799      start_label = top_label;
2800    }
2801
2802  emit_jump (start_label);
2803  emit_note (NULL, NOTE_INSN_LOOP_END);
2804  emit_label (loop_stack->data.loop.end_label);
2805
2806  POPSTACK (loop_stack);
2807
2808  last_expr_type = 0;
2809}
2810
2811/* Finish a null loop, aka do { } while (0).  */
2812
2813void
2814expand_end_null_loop ()
2815{
2816  do_pending_stack_adjust ();
2817  emit_label (loop_stack->data.loop.end_label);
2818
2819  POPSTACK (loop_stack);
2820
2821  last_expr_type = 0;
2822}
2823
2824/* Generate a jump to the current loop's continue-point.
2825   This is usually the top of the loop, but may be specified
2826   explicitly elsewhere.  If not currently inside a loop,
2827   return 0 and do nothing; caller will print an error message.  */
2828
2829int
2830expand_continue_loop (whichloop)
2831     struct nesting *whichloop;
2832{
2833  last_expr_type = 0;
2834  if (whichloop == 0)
2835    whichloop = loop_stack;
2836  if (whichloop == 0)
2837    return 0;
2838  expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
2839			NULL_RTX);
2840  return 1;
2841}
2842
2843/* Generate a jump to exit the current loop.  If not currently inside a loop,
2844   return 0 and do nothing; caller will print an error message.  */
2845
2846int
2847expand_exit_loop (whichloop)
2848     struct nesting *whichloop;
2849{
2850  last_expr_type = 0;
2851  if (whichloop == 0)
2852    whichloop = loop_stack;
2853  if (whichloop == 0)
2854    return 0;
2855  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
2856  return 1;
2857}
2858
2859/* Generate a conditional jump to exit the current loop if COND
2860   evaluates to zero.  If not currently inside a loop,
2861   return 0 and do nothing; caller will print an error message.  */
2862
2863int
2864expand_exit_loop_if_false (whichloop, cond)
2865     struct nesting *whichloop;
2866     tree cond;
2867{
2868  rtx label = gen_label_rtx ();
2869  rtx last_insn;
2870  last_expr_type = 0;
2871
2872  if (whichloop == 0)
2873    whichloop = loop_stack;
2874  if (whichloop == 0)
2875    return 0;
2876  /* In order to handle fixups, we actually create a conditional jump
2877     around an unconditional branch to exit the loop.  If fixups are
2878     necessary, they go before the unconditional branch.  */
2879
2880  do_jump (cond, NULL_RTX, label);
2881  last_insn = get_last_insn ();
2882  if (GET_CODE (last_insn) == CODE_LABEL)
2883    whichloop->data.loop.alt_end_label = last_insn;
2884  expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label,
2885			NULL_RTX);
2886  emit_label (label);
2887
2888  return 1;
2889}
2890
2891/* Like expand_exit_loop_if_false except also emit a note marking
2892   the end of the conditional.  Should only be used immediately
2893   after expand_start_loop.  */
2894
2895int
2896expand_exit_loop_top_cond (whichloop, cond)
2897     struct nesting *whichloop;
2898     tree cond;
2899{
2900  if (! expand_exit_loop_if_false (whichloop, cond))
2901    return 0;
2902
2903  emit_note (NULL, NOTE_INSN_LOOP_END_TOP_COND);
2904  return 1;
2905}
2906
2907/* Return nonzero if the loop nest is empty.  Else return zero.  */
2908
2909int
2910stmt_loop_nest_empty ()
2911{
2912  /* cfun->stmt can be NULL if we are building a call to get the
2913     EH context for a setjmp/longjmp EH target and the current
2914     function was a deferred inline function.  */
2915  return (cfun->stmt == NULL || loop_stack == NULL);
2916}
2917
2918/* Return nonzero if we should preserve sub-expressions as separate
2919   pseudos.  We never do so if we aren't optimizing.  We always do so
2920   if -fexpensive-optimizations.
2921
2922   Otherwise, we only do so if we are in the "early" part of a loop; that
2923   is, few enough insns have been emitted that the loop may still be small.  */
2924
2925int
2926preserve_subexpressions_p ()
2927{
2928  rtx insn;
2929
2930  if (flag_expensive_optimizations)
2931    return 1;
2932
2933  if (optimize == 0 || cfun == 0 || cfun->stmt == 0 || loop_stack == 0)
2934    return 0;
2935
2936  insn = get_last_insn_anywhere ();
2937
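  /* INSN_UIDs increase monotonically as insns are emitted, so the UID
     distance from the loop's start label approximates how many insns
     the loop holds so far; three insns per non-fixed register is a
     heuristic cutoff, nothing more.  */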
2938  return (insn
2939	  && (INSN_UID (insn) - INSN_UID (loop_stack->data.loop.start_label)
2940	      < n_non_fixed_regs * 3));
2941
2942}
2943
2944/* Generate a jump to exit the current loop, conditional, binding contour
2945   or case statement.  Not all such constructs are visible to this function,
2946   only those started with EXIT_FLAG nonzero.  Individual languages use
2947   the EXIT_FLAG parameter to control which kinds of constructs you can
2948   exit this way.
2949
2950   If not currently inside anything that can be exited,
2951   return 0 and do nothing; caller will print an error message.  */
2952
2953int
2954expand_exit_something ()
2955{
2956  struct nesting *n;
2957  last_expr_type = 0;
2958  for (n = nesting_stack; n; n = n->all)
2959    if (n->exit_label != 0)
2960      {
2961	expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
2962	return 1;
2963      }
2964
2965  return 0;
2966}
2967
2968/* Generate RTL to return from the current function, with no value.
2969   (That is, we do not do anything about returning any value.)  */
2970
2971void
2972expand_null_return ()
2973{
2974  rtx last_insn = get_last_insn ();
2975
2976  /* If this function was declared to return a value, but we
2977     didn't, clobber the return registers so that they are not
2978     propagated live to the rest of the function.  */
2979  clobber_return_register ();
2980
2981  expand_null_return_1 (last_insn);
2982}
2983
2984/* Generate RTL to return from the current function, with value VAL.  */
2985
2986static void
2987expand_value_return (val)
2988     rtx val;
2989{
2990  rtx last_insn = get_last_insn ();
2991  rtx return_reg = DECL_RTL (DECL_RESULT (current_function_decl));
2992
2993  /* Copy the value to the return location
2994     unless it's already there.  */
2995
2996  if (return_reg != val)
2997    {
2998      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
2999#ifdef PROMOTE_FUNCTION_RETURN
3000      int unsignedp = TREE_UNSIGNED (type);
3001      enum machine_mode old_mode
3002	= DECL_MODE (DECL_RESULT (current_function_decl));
3003      enum machine_mode mode
3004	= promote_mode (type, old_mode, &unsignedp, 1);
3005
3006      if (mode != old_mode)
3007	val = convert_modes (mode, old_mode, val, unsignedp);
3008#endif
3009      if (GET_CODE (return_reg) == PARALLEL)
3010	emit_group_load (return_reg, val, int_size_in_bytes (type));
3011      else
3012	emit_move_insn (return_reg, val);
3013    }
3014
3015  expand_null_return_1 (last_insn);
3016}
3017
3018/* Output a return with no value.  If LAST_INSN is nonzero,
3019   pretend that the return takes place after LAST_INSN.  */
3020
3021static void
3022expand_null_return_1 (last_insn)
3023     rtx last_insn;
3024{
3025  rtx end_label = cleanup_label ? cleanup_label : return_label;
3026
3027  clear_pending_stack_adjust ();
3028  do_pending_stack_adjust ();
3029  last_expr_type = 0;
3030
3031  if (end_label == 0)
3032     end_label = return_label = gen_label_rtx ();
3033  expand_goto_internal (NULL_TREE, end_label, last_insn);
3034}
3035
3036/* Generate RTL to evaluate the expression RETVAL and return it
3037   from the current function.  */
3038
3039void
3040expand_return (retval)
3041     tree retval;
3042{
3043  /* If there are any cleanups to be performed, then they will
3044     be inserted following LAST_INSN.  It is desirable
3045     that the last_insn, for such purposes, should be the
3046     last insn before computing the return value.  Otherwise, cleanups
3047     which call functions can clobber the return value.  */
3048  /* ??? rms: I think that is erroneous, because in C++ it would
3049     run destructors on variables that might be used in the subsequent
3050     computation of the return value.  */
3051  rtx last_insn = 0;
3052  rtx result_rtl;
3053  rtx val = 0;
3054  tree retval_rhs;
3055
3056  /* If function wants no value, give it none.  */
3057  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3058    {
3059      expand_expr (retval, NULL_RTX, VOIDmode, 0);
3060      emit_queue ();
3061      expand_null_return ();
3062      return;
3063    }
3064
3065  if (retval == error_mark_node)
3066    {
3067      /* Treat this like a return of no value from a function that
3068	 returns a value.  */
3069      expand_null_return ();
3070      return;
3071    }
3072  else if (TREE_CODE (retval) == RESULT_DECL)
3073    retval_rhs = retval;
3074  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
3075	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3076    retval_rhs = TREE_OPERAND (retval, 1);
3077  else if (VOID_TYPE_P (TREE_TYPE (retval)))
3078    /* Recognize tail-recursive call to void function.  */
3079    retval_rhs = retval;
3080  else
3081    retval_rhs = NULL_TREE;
3082
3083  last_insn = get_last_insn ();
3084
3085  /* Distribute return down conditional expr if either of the sides
3086     may involve tail recursion (see test below).  This enhances the number
3087     of tail recursions we see.  Don't do this always since it can produce
3088     sub-optimal code in some cases and we distribute assignments into
3089     conditional expressions when it would help.  */
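  /* In effect, `return cond ? f (x) : g (x);' is rewritten as
     `if (cond) return f (x); else return g (x);', so that each arm
     can be considered for tail recursion on its own.  */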
3090
3091  if (optimize && retval_rhs != 0
3092      && frame_offset == 0
3093      && TREE_CODE (retval_rhs) == COND_EXPR
3094      && (TREE_CODE (TREE_OPERAND (retval_rhs, 1)) == CALL_EXPR
3095	  || TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
3096    {
3097      rtx label = gen_label_rtx ();
3098      tree expr;
3099
3100      do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
3101      start_cleanup_deferral ();
3102      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3103		    DECL_RESULT (current_function_decl),
3104		    TREE_OPERAND (retval_rhs, 1));
3105      TREE_SIDE_EFFECTS (expr) = 1;
3106      expand_return (expr);
3107      emit_label (label);
3108
3109      expr = build (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (current_function_decl)),
3110		    DECL_RESULT (current_function_decl),
3111		    TREE_OPERAND (retval_rhs, 2));
3112      TREE_SIDE_EFFECTS (expr) = 1;
3113      expand_return (expr);
3114      end_cleanup_deferral ();
3115      return;
3116    }
3117
3118  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3119
3120  /* If the result is an aggregate that is being returned in one (or more)
3121     registers, load the registers here.  The compiler currently can't handle
3122     copying a BLKmode value into registers.  We could put this code in a
3123     more general area (for use by everyone instead of just function
3124     call/return), but until this feature is generally usable it is kept here
3125     (and in expand_call).  The value must go into a pseudo in case there
3126     are cleanups that will clobber the real return register.  */
3127
3128  if (retval_rhs != 0
3129      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3130      && GET_CODE (result_rtl) == REG)
3131    {
3132      int i;
3133      unsigned HOST_WIDE_INT bitpos, xbitpos;
3134      unsigned HOST_WIDE_INT big_endian_correction = 0;
3135      unsigned HOST_WIDE_INT bytes
3136	= int_size_in_bytes (TREE_TYPE (retval_rhs));
3137      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
3138      unsigned int bitsize
3139	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
3140      rtx *result_pseudos = (rtx *) alloca (sizeof (rtx) * n_regs);
3141      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
3142      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
3143      enum machine_mode tmpmode, result_reg_mode;
3144
3145      if (bytes == 0)
3146	{
3147	  expand_null_return ();
3148	  return;
3149	}
3150
3151      /* Structures whose size is not a multiple of a word are aligned
3152	 to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
3153	 machine, this means we must skip the empty high order bytes when
3154	 calculating the bit offset.  */
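      /* Numeric check: with UNITS_PER_WORD == 4, BITS_PER_WORD == 32
	 and a 6-byte value, bytes % UNITS_PER_WORD == 2, so the
	 correction computed below is 32 - 2 * 8 == 16 bits.  */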
3155      if (BYTES_BIG_ENDIAN
3156	  && !FUNCTION_ARG_REG_LITTLE_ENDIAN
3157	  && bytes % UNITS_PER_WORD)
3158	big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
3159						  * BITS_PER_UNIT));
3160
3161      /* Copy the structure BITSIZE bits at a time.  */
3162      for (bitpos = 0, xbitpos = big_endian_correction;
3163	   bitpos < bytes * BITS_PER_UNIT;
3164	   bitpos += bitsize, xbitpos += bitsize)
3165	{
3166	  /* We need a new destination pseudo each time xbitpos is
3167	     on a word boundary and when xbitpos == big_endian_correction
3168	     (the first time through).  */
3169	  if (xbitpos % BITS_PER_WORD == 0
3170	      || xbitpos == big_endian_correction)
3171	    {
3172	      /* Generate an appropriate register.  */
3173	      dst = gen_reg_rtx (word_mode);
3174	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;
3175
3176	      /* Clear the destination before we move anything into it.  */
3177	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
3178	    }
3179
3180	  /* We need a new source operand each time bitpos is on a word
3181	     boundary.  */
3182	  if (bitpos % BITS_PER_WORD == 0)
3183	    src = operand_subword_force (result_val,
3184					 bitpos / BITS_PER_WORD,
3185					 BLKmode);
3186
3187	  /* Use bitpos for the source extraction (left justified) and
3188	     xbitpos for the destination store (right justified).  */
3189	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
3190			   extract_bit_field (src, bitsize,
3191					      bitpos % BITS_PER_WORD, 1,
3192					      NULL_RTX, word_mode, word_mode,
3193					      BITS_PER_WORD),
3194			   BITS_PER_WORD);
3195	}
3196
3197      /* Find the smallest integer mode large enough to hold the
3198	 entire structure and use that mode instead of BLKmode
3199	 on the USE insn for the return register.  */
3200      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3201	   tmpmode != VOIDmode;
3202	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
3203	/* Have we found a large enough mode?  */
3204	if (GET_MODE_SIZE (tmpmode) >= bytes)
3205	  break;
3206
3207      /* No suitable mode found.  */
3208      if (tmpmode == VOIDmode)
3209	abort ();
3210
3211      PUT_MODE (result_rtl, tmpmode);
3212
3213      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
3214	result_reg_mode = word_mode;
3215      else
3216	result_reg_mode = tmpmode;
3217      result_reg = gen_reg_rtx (result_reg_mode);
3218
3219      emit_queue ();
3220      for (i = 0; i < n_regs; i++)
3221	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
3222			result_pseudos[i]);
3223
3224      if (tmpmode != result_reg_mode)
3225	result_reg = gen_lowpart (tmpmode, result_reg);
3226
3227      expand_value_return (result_reg);
3228    }
3229  else if (retval_rhs != 0
3230	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3231	   && (GET_CODE (result_rtl) == REG
3232	       || (GET_CODE (result_rtl) == PARALLEL)))
3233    {
3234      /* Calculate the return value into a temporary (usually a pseudo
3235         reg).  */
3236      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
3237      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);
3238
3239      val = assign_temp (nt, 0, 0, 1);
3240      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
3241      val = force_not_mem (val);
3242      emit_queue ();
3243      /* Return the calculated value, doing cleanups first.  */
3244      expand_value_return (val);
3245    }
3246  else
3247    {
3248      /* No cleanups or no hard reg used;
3249	 calculate value into hard return reg.  */
3250      expand_expr (retval, const0_rtx, VOIDmode, 0);
3251      emit_queue ();
3252      expand_value_return (result_rtl);
3253    }
3254}
3255
3256/* Return 1 if the end of the generated RTL is not a barrier.
3257   This means code already compiled can drop through.  */
3258
3259int
3260drop_through_at_end_p ()
3261{
3262  rtx insn = get_last_insn ();
3263  while (insn && GET_CODE (insn) == NOTE)
3264    insn = PREV_INSN (insn);
3265  return insn && GET_CODE (insn) != BARRIER;
3266}
3267
3268/* Attempt to optimize a potential tail recursion call into a goto.
3269   ARGUMENTS are the arguments to a CALL_EXPR; LAST_INSN indicates
3270   where to place the jump to the tail recursion label.
3271
3272   Return TRUE if the call was optimized into a goto.  */
3273
3274int
3275optimize_tail_recursion (arguments, last_insn)
3276     tree arguments;
3277     rtx last_insn;
3278{
3279  /* Finish checking validity, and if valid emit code to set the
3280     argument variables for the new call.  */
3281  if (tail_recursion_args (arguments, DECL_ARGUMENTS (current_function_decl)))
3282    {
3283      if (tail_recursion_label == 0)
3284	{
3285	  tail_recursion_label = gen_label_rtx ();
3286	  emit_label_after (tail_recursion_label,
3287			    tail_recursion_reentry);
3288	}
3289      emit_queue ();
3290      expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
3291      emit_barrier ();
3292      return 1;
3293    }
3294  return 0;
3295}
3296
3297/* Emit code to alter this function's formal parms for a tail-recursive call.
3298   ACTUALS is a list of actual parameter expressions (chain of TREE_LISTs).
3299   FORMALS is the chain of decls of formals.
3300   Return 1 if this can be done;
3301   otherwise return 0 and do not emit any code.  */
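
/* For instance (an illustrative sketch, not taken from a real front end):
   given

       int sum (n, acc) int n, acc;
       { if (n == 0) return acc; return sum (n - 1, acc + n); }

   the tail call is rewritten, in effect, as

       tmp = acc + n;  n = n - 1;  acc = tmp;
       goto tail_recursion_label;

   All actuals are computed before any formal is stored; that is why the
   loop below copies ARGVEC[I] to a register whenever it mentions one of
   the formals stored before formal I.  */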
3302
3303static int
3304tail_recursion_args (actuals, formals)
3305     tree actuals, formals;
3306{
3307  tree a = actuals, f = formals;
3308  int i;
3309  rtx *argvec;
3310
3311  /* Check that number and types of actuals are compatible
3312     with the formals.  This is not always true in valid C code.
3313     Also check that no formal needs to be addressable
3314     and that all formals are scalars.  */
3315
3316  /* Also count the args.  */
3317
3318  for (a = actuals, f = formals, i = 0; a && f; a = TREE_CHAIN (a), f = TREE_CHAIN (f), i++)
3319    {
3320      if (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_VALUE (a)))
3321	  != TYPE_MAIN_VARIANT (TREE_TYPE (f)))
3322	return 0;
3323      if (GET_CODE (DECL_RTL (f)) != REG || DECL_MODE (f) == BLKmode)
3324	return 0;
3325    }
3326  if (a != 0 || f != 0)
3327    return 0;
3328
3329  /* Compute all the actuals.  */
3330
3331  argvec = (rtx *) alloca (i * sizeof (rtx));
3332
3333  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3334    argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
3335
3336  /* Find which actual values refer to current values of previous formals.
3337     Copy each of them now, before any formal is changed.  */
3338
3339  for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
3340    {
3341      int copy = 0;
3342      int j;
3343      for (f = formals, j = 0; j < i; f = TREE_CHAIN (f), j++)
3344	if (reg_mentioned_p (DECL_RTL (f), argvec[i]))
3345	  {
3346	    copy = 1;
3347	    break;
3348	  }
3349      if (copy)
3350	argvec[i] = copy_to_reg (argvec[i]);
3351    }
3352
3353  /* Store the values of the actuals into the formals.  */
3354
3355  for (f = formals, a = actuals, i = 0; f;
3356       f = TREE_CHAIN (f), a = TREE_CHAIN (a), i++)
3357    {
3358      if (GET_MODE (DECL_RTL (f)) == GET_MODE (argvec[i]))
3359	emit_move_insn (DECL_RTL (f), argvec[i]);
3360      else
3361	{
3362	  rtx tmp = argvec[i];
3363
3364	  if (DECL_MODE (f) != GET_MODE (DECL_RTL (f)))
3365	    {
3366	      tmp = gen_reg_rtx (DECL_MODE (f));
3367	      convert_move (tmp, argvec[i],
3368			    TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3369	    }
3370	  convert_move (DECL_RTL (f), tmp,
3371			TREE_UNSIGNED (TREE_TYPE (TREE_VALUE (a))));
3372	}
3373    }
3374
3375  free_temp_slots ();
3376  return 1;
3377}
3378
3379/* Generate the RTL code for entering a binding contour.
3380   The variables are declared one by one, by calls to `expand_decl'.
3381
3382   FLAGS is a bitwise or of the following flags:
3383
3384     1 - Nonzero if this construct should be visible to
3385         `exit_something'.
3386
3387     2 - Nonzero if this contour does not require a
3388	 NOTE_INSN_BLOCK_BEG note.  Virtually all calls from
3389	 language-independent code should set this flag because they
3390	 will not create corresponding BLOCK nodes.  (There should be
3391	 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
3392	 and BLOCKs.)  If this flag is set, MARK_ENDS should be zero
3393	 when expand_end_bindings is called.
3394
3395    If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
3396    optionally be supplied.  If so, it becomes the NOTE_BLOCK for the
3397    note.  */
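
/* An illustrative call sequence for a brace-enclosed scope (a sketch, not
   quoted from any particular front end):

       expand_start_bindings (0);
       expand_decl (decl), expand_decl_init (decl);   (per local variable)
       ... expand the statements ...
       expand_end_bindings (decls, 1, 0);

   Passing flag bit 2, as expand_start_target_temps does below, suppresses
   the NOTE_INSN_BLOCK_BEG note; MARK_ENDS must then be 0 in the matching
   expand_end_bindings call.  */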
3398
3399void
3400expand_start_bindings_and_block (flags, block)
3401     int flags;
3402     tree block;
3403{
3404  struct nesting *thisblock = ALLOC_NESTING ();
3405  rtx note;
3406  int exit_flag = ((flags & 1) != 0);
3407  int block_flag = ((flags & 2) == 0);
3408
3409  /* If a BLOCK is supplied, then the caller should be requesting a
3410     NOTE_INSN_BLOCK_BEG note.  */
3411  if (!block_flag && block)
3412    abort ();
3413
3414  /* Create a note to mark the beginning of the block.  */
3415  if (block_flag)
3416    {
3417      note = emit_note (NULL, NOTE_INSN_BLOCK_BEG);
3418      NOTE_BLOCK (note) = block;
3419    }
3420  else
3421    note = emit_note (NULL, NOTE_INSN_DELETED);
3422
3423  /* Make an entry on block_stack for the block we are entering.  */
3424
3425  thisblock->next = block_stack;
3426  thisblock->all = nesting_stack;
3427  thisblock->depth = ++nesting_depth;
3428  thisblock->data.block.stack_level = 0;
3429  thisblock->data.block.cleanups = 0;
3430  thisblock->data.block.n_function_calls = 0;
3431  thisblock->data.block.exception_region = 0;
3432  thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;
3433
3434  thisblock->data.block.conditional_code = 0;
3435  thisblock->data.block.last_unconditional_cleanup = note;
3436  /* When we insert instructions after the last unconditional cleanup,
3437     we don't adjust last_insn.  That means that a later add_insn will
3438     clobber the instructions we've just added.  The easiest way to
3439     fix this is to just insert another instruction here, so that the
3440     instructions inserted after the last unconditional cleanup are
3441     never the last instruction.  */
3442  emit_note (NULL, NOTE_INSN_DELETED);
3443  thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
3444
3445  if (block_stack
3446      && !(block_stack->data.block.cleanups == NULL_TREE
3447	   && block_stack->data.block.outer_cleanups == NULL_TREE))
3448    thisblock->data.block.outer_cleanups
3449      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
3450		   block_stack->data.block.outer_cleanups);
3451  else
3452    thisblock->data.block.outer_cleanups = 0;
3453  thisblock->data.block.label_chain = 0;
3454  thisblock->data.block.innermost_stack_block = stack_block_stack;
3455  thisblock->data.block.first_insn = note;
3456  thisblock->data.block.block_start_count = ++current_block_start_count;
3457  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
3458  block_stack = thisblock;
3459  nesting_stack = thisblock;
3460
3461  /* Make a new level for allocating stack slots.  */
3462  push_temp_slots ();
3463}
3464
3465/* Specify the scope of temporaries created by TARGET_EXPRs.  Similar
3466   to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
3467   expand_expr are made.  After we end the region, we know that all
3468   space for all temporaries that were created by TARGET_EXPRs will be
3469   destroyed and their space freed for reuse.  */
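
/* A caller brackets the expansion of one statement roughly as follows
   (illustrative):

       expand_start_target_temps ();
       expand_expr (stmt, const0_rtx, VOIDmode, 0);
       expand_end_target_temps ();

   so that TARGET_EXPR temporaries created while expanding STMT are
   cleaned up, and their stack slots freed, right here.  */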
3470
3471void
3472expand_start_target_temps ()
3473{
3474  /* This is so that even if the result is preserved, the space
3475     allocated will be freed, as we know that it is no longer in use.  */
3476  push_temp_slots ();
3477
3478  /* Start a new binding layer that will keep track of all cleanup
3479     actions to be performed.  */
3480  expand_start_bindings (2);
3481
3482  target_temp_slot_level = temp_slot_level;
3483}
3484
3485void
3486expand_end_target_temps ()
3487{
3488  expand_end_bindings (NULL_TREE, 0, 0);
3489
3490  /* This is so that even if the result is preserved, the space
3491     allocated will be freed, as we know that it is no longer in use.  */
3492  pop_temp_slots ();
3493}
3494
3495/* Given a pointer to a BLOCK node, return non-zero if (and only if) the node
3496   in question represents the outermost pair of curly braces (i.e. the "body
3497   block") of a function or method.
3498
3499   For any BLOCK node representing a "body block" of a function or method, the
3500   BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
3501   represents the outermost (function) scope for the function or method (i.e.
3502   the one which includes the formal parameters).  The BLOCK_SUPERCONTEXT of
3503   *that* node in turn will point to the relevant FUNCTION_DECL node.  */
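
/* That is, the expected nesting is:

       FUNCTION_DECL
	 BLOCK	   <-- outermost scope, includes the formal parameters
	   BLOCK   <-- the "body block" recognized here  */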
3504
3505int
3506is_body_block (stmt)
3507     tree stmt;
3508{
3509  if (TREE_CODE (stmt) == BLOCK)
3510    {
3511      tree parent = BLOCK_SUPERCONTEXT (stmt);
3512
3513      if (parent && TREE_CODE (parent) == BLOCK)
3514	{
3515	  tree grandparent = BLOCK_SUPERCONTEXT (parent);
3516
3517	  if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
3518	    return 1;
3519	}
3520    }
3521
3522  return 0;
3523}
3524
3525/* True if we are currently emitting insns in an area of output code
3526   that is controlled by a conditional expression.  This is used by
3527   the cleanup handling code to generate conditional cleanup actions.  */
3528
3529int
3530conditional_context ()
3531{
3532  return block_stack && block_stack->data.block.conditional_code;
3533}
3534
3535/* Return an opaque pointer to the current nesting level, so frontend code
3536   can check its own sanity.  */
3537
3538struct nesting *
3539current_nesting_level ()
3540{
3541  return cfun ? block_stack : 0;
3542}
3543
3544/* Emit a handler label for a nonlocal goto handler.
3545   Also emit code to store the handler label in SLOT before BEFORE_INSN.  */
3546
3547static rtx
3548expand_nl_handler_label (slot, before_insn)
3549     rtx slot, before_insn;
3550{
3551  rtx insns;
3552  rtx handler_label = gen_label_rtx ();
3553
3554  /* Don't let cleanup_cfg delete the handler.  */
3555  LABEL_PRESERVE_P (handler_label) = 1;
3556
3557  start_sequence ();
3558  emit_move_insn (slot, gen_rtx_LABEL_REF (Pmode, handler_label));
3559  insns = get_insns ();
3560  end_sequence ();
3561  emit_insns_before (insns, before_insn);
3562
3563  emit_label (handler_label);
3564
3565  return handler_label;
3566}
3567
3568/* Emit code to restore vital registers at the beginning of a nonlocal goto
3569   handler.  */
3570static void
3571expand_nl_goto_receiver ()
3572{
3573#ifdef HAVE_nonlocal_goto
3574  if (! HAVE_nonlocal_goto)
3575#endif
3576    /* First adjust our frame pointer to its actual value.  It was
3577       previously set to the start of the virtual area corresponding to
3578       the stacked variables when we branched here, and now needs to be
3579       adjusted to the actual hardware fp value.
3580
3581       Assignments to virtual registers are converted by
3582       instantiate_virtual_regs into the corresponding assignment
3583       to the underlying register (fp in this case) that makes
3584       the original assignment true.
3585       So the following insn will actually be
3586       decrementing fp by STARTING_FRAME_OFFSET.  */
3587    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
3588
3589#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
3590  if (fixed_regs[ARG_POINTER_REGNUM])
3591    {
3592#ifdef ELIMINABLE_REGS
3593      /* If the argument pointer can be eliminated in favor of the
3594	 frame pointer, we don't need to restore it.  We assume here
3595	 that if such an elimination is present, it can always be used.
3596	 This is the case on all known machines; if we don't make this
3597	 assumption, we do unnecessary saving on many machines.  */
3598      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
3599      size_t i;
3600
3601      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
3602	if (elim_regs[i].from == ARG_POINTER_REGNUM
3603	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
3604	  break;
3605
3606      if (i == ARRAY_SIZE (elim_regs))
3607#endif
3608	{
3609	  /* Now restore our arg pointer from the address at which it
3610	     was saved in our stack frame.  */
3611	  emit_move_insn (virtual_incoming_args_rtx,
3612			  copy_to_reg (get_arg_pointer_save_area (cfun)));
3613	}
3614    }
3615#endif
3616
3617#ifdef HAVE_nonlocal_goto_receiver
3618  if (HAVE_nonlocal_goto_receiver)
3619    emit_insn (gen_nonlocal_goto_receiver ());
3620#endif
3621}
3622
3623/* Make handlers for nonlocal gotos taking place in the function calls in
3624   block THISBLOCK.  */
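
/* Such gotos arise, for example, from the GNU C nested-function extension
   (an illustrative sketch):

       int f (void)
       {
	 __label__ failure;
	 int g (int i) { if (i < 0) goto failure; return i; }
	 ... pass g to code that may call it ...
       failure:
	 return -1;
       }

   If g executes the goto, control reenters f through the receiver code
   emitted here.  */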
3625
3626static void
3627expand_nl_goto_receivers (thisblock)
3628     struct nesting *thisblock;
3629{
3630  tree link;
3631  rtx afterward = gen_label_rtx ();
3632  rtx insns, slot;
3633  rtx label_list;
3634  int any_invalid;
3635
3636  /* Record the handler address in the stack slot for that purpose,
3637     during this block, saving and restoring the outer value.  */
3638  if (thisblock->next != 0)
3639    for (slot = nonlocal_goto_handler_slots; slot; slot = XEXP (slot, 1))
3640      {
3641	rtx save_receiver = gen_reg_rtx (Pmode);
3642	emit_move_insn (XEXP (slot, 0), save_receiver);
3643
3644	start_sequence ();
3645	emit_move_insn (save_receiver, XEXP (slot, 0));
3646	insns = get_insns ();
3647	end_sequence ();
3648	emit_insns_before (insns, thisblock->data.block.first_insn);
3649      }
3650
3651  /* Jump around the handlers; they run only when specially invoked.  */
3652  emit_jump (afterward);
3653
3654  /* Make a separate handler for each label.  */
3655  link = nonlocal_labels;
3656  slot = nonlocal_goto_handler_slots;
3657  label_list = NULL_RTX;
3658  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3659    /* Skip any labels we shouldn't be able to jump to from here;
3660       we generate one special handler for all of them below, which
3661       just traps.  */
3662    if (! DECL_TOO_LATE (TREE_VALUE (link)))
3663      {
3664	rtx lab;
3665	lab = expand_nl_handler_label (XEXP (slot, 0),
3666				       thisblock->data.block.first_insn);
3667	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3668
3669	expand_nl_goto_receiver ();
3670
3671	/* Jump to the "real" nonlocal label.  */
3672	expand_goto (TREE_VALUE (link));
3673      }
3674
3675  /* A second pass over all nonlocal labels; this time we handle those
3676     we should not be able to jump to at this point.  */
3677  link = nonlocal_labels;
3678  slot = nonlocal_goto_handler_slots;
3679  any_invalid = 0;
3680  for (; link; link = TREE_CHAIN (link), slot = XEXP (slot, 1))
3681    if (DECL_TOO_LATE (TREE_VALUE (link)))
3682      {
3683	rtx lab;
3684	lab = expand_nl_handler_label (XEXP (slot, 0),
3685				       thisblock->data.block.first_insn);
3686	label_list = gen_rtx_EXPR_LIST (VOIDmode, lab, label_list);
3687	any_invalid = 1;
3688      }
3689
3690  if (any_invalid)
3691    {
3692      expand_nl_goto_receiver ();
3693      expand_builtin_trap ();
3694    }
3695
3696  nonlocal_goto_handler_labels = label_list;
3697  emit_label (afterward);
3698}
3699
3700/* Warn about any unused VARS (which may contain nodes other than
3701   VAR_DECLs, but such nodes are ignored).  The nodes are connected
3702   via the TREE_CHAIN field.  */
3703
3704void
3705warn_about_unused_variables (vars)
3706     tree vars;
3707{
3708  tree decl;
3709
3710  if (warn_unused_variable)
3711    for (decl = vars; decl; decl = TREE_CHAIN (decl))
3712      if (TREE_CODE (decl) == VAR_DECL
3713	  && ! TREE_USED (decl)
3714	  && ! DECL_IN_SYSTEM_HEADER (decl)
3715	  && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
3716	warning_with_decl (decl, "unused variable `%s'");
3717}
3718
3719/* Generate RTL code to terminate a binding contour.
3720
3721   VARS is the chain of VAR_DECL nodes for the variables bound in this
3722   contour.  There may actually be other nodes in this chain, but any
3723   nodes other than VAR_DECLS are ignored.
3724
3725   MARK_ENDS is nonzero if we should put a note at the beginning
3726   and end of this binding contour.
3727
3728   DONT_JUMP_IN is nonzero if it is not valid to jump into this contour.
3729   (That is true automatically if the contour has a saved stack level.)  */
3730
3731void
3732expand_end_bindings (vars, mark_ends, dont_jump_in)
3733     tree vars;
3734     int mark_ends;
3735     int dont_jump_in;
3736{
3737  struct nesting *thisblock = block_stack;
3738
3739  /* If any of the variables in this scope were not used, warn the
3740     user.  */
3741  warn_about_unused_variables (vars);
3742
3743  if (thisblock->exit_label)
3744    {
3745      do_pending_stack_adjust ();
3746      emit_label (thisblock->exit_label);
3747    }
3748
3749  /* If necessary, make handlers for nonlocal gotos taking
3750     place in the function calls in this block.  */
3751  if (function_call_count != thisblock->data.block.n_function_calls
3752      && nonlocal_labels
3753      /* Make handler for outermost block
3754	 if there were any nonlocal gotos to this function.  */
3755      && (thisblock->next == 0 ? current_function_has_nonlocal_label
3756	  /* Make handler for inner block if it has something
3757	     special to do when you jump out of it.  */
3758	  : (thisblock->data.block.cleanups != 0
3759	     || thisblock->data.block.stack_level != 0)))
3760    expand_nl_goto_receivers (thisblock);
3761
3762  /* Don't allow jumping into a block that has a stack level.
3763     Cleanups are allowed, though.  */
3764  if (dont_jump_in
3765      || thisblock->data.block.stack_level != 0)
3766    {
3767      struct label_chain *chain;
3768
3769      /* Any labels in this block are no longer valid to go to.
3770	 Mark them to cause an error message.  */
3771      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
3772	{
3773	  DECL_TOO_LATE (chain->label) = 1;
3774	  /* If any goto without a fixup came to this label,
3775	     that must be an error, because gotos without fixups
3776	     come from outside all saved stack-levels.  */
3777	  if (TREE_ADDRESSABLE (chain->label))
3778	    error_with_decl (chain->label,
3779			     "label `%s' used before containing binding contour");
3780	}
3781    }
3782
3783  /* Restore stack level in effect before the block
3784	 (only if variable-size objects were allocated).  */
3785  /* Perform any cleanups associated with the block.  */
3786
3787  if (thisblock->data.block.stack_level != 0
3788      || thisblock->data.block.cleanups != 0)
3789    {
3790      int reachable;
3791      rtx insn;
3792
3793      /* Don't let cleanups affect ({...}) constructs.  */
3794      int old_expr_stmts_for_value = expr_stmts_for_value;
3795      rtx old_last_expr_value = last_expr_value;
3796      tree old_last_expr_type = last_expr_type;
3797      expr_stmts_for_value = 0;
3798
3799      /* Only clean up here if this point can actually be reached.  */
3800      insn = get_last_insn ();
3801      if (GET_CODE (insn) == NOTE)
3802	insn = prev_nonnote_insn (insn);
3803      reachable = (! insn || GET_CODE (insn) != BARRIER);
3804
3805      /* Do the cleanups.  */
3806      expand_cleanups (thisblock->data.block.cleanups, NULL_TREE, 0, reachable);
3807      if (reachable)
3808	do_pending_stack_adjust ();
3809
3810      expr_stmts_for_value = old_expr_stmts_for_value;
3811      last_expr_value = old_last_expr_value;
3812      last_expr_type = old_last_expr_type;
3813
3814      /* Restore the stack level.  */
3815
3816      if (reachable && thisblock->data.block.stack_level != 0)
3817	{
3818	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3819			      thisblock->data.block.stack_level, NULL_RTX);
3820	  if (nonlocal_goto_handler_slots != 0)
3821	    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
3822			     NULL_RTX);
3823	}
3824
3825      /* Any gotos out of this block must also do these things.
3826	 Also report any gotos with fixups that came to labels in this
3827	 level.  */
3828      fixup_gotos (thisblock,
3829		   thisblock->data.block.stack_level,
3830		   thisblock->data.block.cleanups,
3831		   thisblock->data.block.first_insn,
3832		   dont_jump_in);
3833    }
3834
3835  /* Mark the beginning and end of the scope if requested.
3836     We do this now, after running cleanups on the variables
3837     just going out of scope, so they are in scope for their cleanups.  */
3838
3839  if (mark_ends)
3840    {
3841      rtx note = emit_note (NULL, NOTE_INSN_BLOCK_END);
3842      NOTE_BLOCK (note) = NOTE_BLOCK (thisblock->data.block.first_insn);
3843    }
3844  else
3845    /* Get rid of the beginning-mark if we don't make an end-mark.  */
3846    NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
3847
3848  /* Restore the temporary level of TARGET_EXPRs.  */
3849  target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;
3850
3851  /* Restore block_stack level for containing block.  */
3852
3853  stack_block_stack = thisblock->data.block.innermost_stack_block;
3854  POPSTACK (block_stack);
3855
3856  /* Pop the stack slot nesting and free any slots at this level.  */
3857  pop_temp_slots ();
3858}
3859
3860/* Generate code to save the stack pointer at the start of the current block
3861   and set up to restore it on exit.  */
3862
3863void
3864save_stack_pointer ()
3865{
3866  struct nesting *thisblock = block_stack;
3867
3868  if (thisblock->data.block.stack_level == 0)
3869    {
3870      emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3871		       &thisblock->data.block.stack_level,
3872		       thisblock->data.block.first_insn);
3873      stack_block_stack = thisblock;
3874    }
3875}
3876
3877/* Generate RTL for the automatic variable declaration DECL.
3878   (Other kinds of declarations are simply ignored if seen here.)  */
3879
3880void
3881expand_decl (decl)
3882     tree decl;
3883{
3884  struct nesting *thisblock;
3885  tree type;
3886
3887  type = TREE_TYPE (decl);
3888
3889  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
3890     type in case this node is used in a reference.  */
3891  if (TREE_CODE (decl) == CONST_DECL)
3892    {
3893      DECL_MODE (decl) = TYPE_MODE (type);
3894      DECL_ALIGN (decl) = TYPE_ALIGN (type);
3895      DECL_SIZE (decl) = TYPE_SIZE (type);
3896      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
3897      return;
3898    }
3899
3900  /* Otherwise, only automatic variables need any expansion done.  Static and
3901     external variables, and external functions, will be handled by
3902     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
3903     nothing.  PARM_DECLs are handled in `assign_parms'.  */
3904  if (TREE_CODE (decl) != VAR_DECL)
3905    return;
3906
3907  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
3908    return;
3909
3910  thisblock = block_stack;
3911
3912  /* Create the RTL representation for the variable.  */
3913
3914  if (type == error_mark_node)
3915    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));
3916
3917  else if (DECL_SIZE (decl) == 0)
3918    /* Variable with incomplete type.  */
3919    {
3920      rtx x;
3921      if (DECL_INITIAL (decl) == 0)
3922	/* Error message was already done; now avoid a crash.  */
3923	x = gen_rtx_MEM (BLKmode, const0_rtx);
3924      else
3925	/* An initializer is going to decide the size of this array.
3926	   Until we know the size, represent its address with a reg.  */
3927	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));
3928
3929      set_mem_attributes (x, decl, 1);
3930      SET_DECL_RTL (decl, x);
3931    }
3932  else if (DECL_MODE (decl) != BLKmode
3933	   /* If -ffloat-store, don't put explicit float vars
3934	      into regs.  */
3935	   && !(flag_float_store
3936		&& TREE_CODE (type) == REAL_TYPE)
3937	   && ! TREE_THIS_VOLATILE (decl)
3938	   && (DECL_REGISTER (decl) || optimize))
3939    {
3940      /* Automatic variable that can go in a register.  */
3941      int unsignedp = TREE_UNSIGNED (type);
3942      enum machine_mode reg_mode
3943	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
3944
3945      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));
3946
3947      if (GET_CODE (DECL_RTL (decl)) == REG)
3948	REGNO_DECL (REGNO (DECL_RTL (decl))) = decl;
3949      else if (GET_CODE (DECL_RTL (decl)) == CONCAT)
3950	{
3951	  REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 0))) = decl;
3952	  REGNO_DECL (REGNO (XEXP (DECL_RTL (decl), 1))) = decl;
3953	}
3954
3955      mark_user_reg (DECL_RTL (decl));
3956
3957      if (POINTER_TYPE_P (type))
3958	mark_reg_pointer (DECL_RTL (decl),
3959			  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
3960
3961      maybe_set_unchanging (DECL_RTL (decl), decl);
3962
3963      /* If something wants our address, try to use ADDRESSOF.  */
3964      if (TREE_ADDRESSABLE (decl))
3965	put_var_into_stack (decl);
3966    }
3967
3968  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
3969	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
3970		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
3971					  STACK_CHECK_MAX_VAR_SIZE)))
3972    {
3973      /* Variable of fixed size that goes on the stack.  */
3974      rtx oldaddr = 0;
3975      rtx addr;
3976      rtx x;
3977
3978      /* If we previously made RTL for this decl, it must be an array
3979	 whose size was determined by the initializer.
3980	 The old address was a register; set that register now
3981	 to the proper address.  */
3982      if (DECL_RTL_SET_P (decl))
3983	{
3984	  if (GET_CODE (DECL_RTL (decl)) != MEM
3985	      || GET_CODE (XEXP (DECL_RTL (decl), 0)) != REG)
3986	    abort ();
3987	  oldaddr = XEXP (DECL_RTL (decl), 0);
3988	}
3989
3990      /* Set alignment we actually gave this decl.  */
3991      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
3992			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
3993      DECL_USER_ALIGN (decl) = 0;
3994
3995      x = assign_temp (decl, 1, 1, 1);
3996      set_mem_attributes (x, decl, 1);
3997      SET_DECL_RTL (decl, x);
3998
3999      if (oldaddr)
4000	{
4001	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
4002	  if (addr != oldaddr)
4003	    emit_move_insn (oldaddr, addr);
4004	}
4005    }
4006  else
4007    /* Dynamic-size object: must push space on the stack.  */
4008    {
4009      rtx address, size, x;
4010
4011      /* Record the stack pointer on entry to the block, if we have
4012	 not already done so.  */
4013      do_pending_stack_adjust ();
4014      save_stack_pointer ();
4015
4016      /* In function-at-a-time mode, variable_size doesn't expand this,
4017	 so do it now.  */
4018      if (TREE_CODE (type) == ARRAY_TYPE && TYPE_DOMAIN (type))
4019	expand_expr (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
4020		     const0_rtx, VOIDmode, 0);
4021
4022      /* Compute the variable's size, in bytes.  */
4023      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
4024      free_temp_slots ();
4025
4026      /* Allocate space on the stack for the variable.  Note that
4027	 DECL_ALIGN says how the variable is to be aligned and we
4028	 cannot use it to conclude anything about the alignment of
4029	 the size.  */
4030      address = allocate_dynamic_stack_space (size, NULL_RTX,
4031					      TYPE_ALIGN (TREE_TYPE (decl)));
4032
4033      /* Reference the variable indirect through that rtx.  */
4034      x = gen_rtx_MEM (DECL_MODE (decl), address);
4035      set_mem_attributes (x, decl, 1);
4036      SET_DECL_RTL (decl, x);
4037
4039      /* Indicate the alignment we actually gave this variable.  */
4040#ifdef STACK_BOUNDARY
4041      DECL_ALIGN (decl) = STACK_BOUNDARY;
4042#else
4043      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
4044#endif
4045      DECL_USER_ALIGN (decl) = 0;
4046    }
4047}
4048
4049/* Emit code to perform the initialization of a declaration DECL.  */
4050
4051void
4052expand_decl_init (decl)
4053     tree decl;
4054{
4055  int was_used = TREE_USED (decl);
4056
4057  /* If this is a CONST_DECL, we don't have to generate any code.  Likewise
4058     for static decls.  */
4059  if (TREE_CODE (decl) == CONST_DECL
4060      || TREE_STATIC (decl))
4061    return;
4062
4063  /* Compute and store the initial value now.  */
4064
4065  if (DECL_INITIAL (decl) == error_mark_node)
4066    {
4067      enum tree_code code = TREE_CODE (TREE_TYPE (decl));
4068
4069      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
4070	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
4071	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
4072			   0, 0);
4073      emit_queue ();
4074    }
4075  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
4076    {
4077      emit_line_note (DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl));
4078      expand_assignment (decl, DECL_INITIAL (decl), 0, 0);
4079      emit_queue ();
4080    }
4081
4082  /* Don't let the initialization count as "using" the variable.  */
4083  TREE_USED (decl) = was_used;
4084
4085  /* Free any temporaries we made while initializing the decl.  */
4086  preserve_temp_slots (NULL_RTX);
4087  free_temp_slots ();
4088}
4089
4090/* CLEANUP is an expression to be executed at exit from this binding contour;
4091   for example, in C++, it might call the destructor for this variable.
4092
4093   We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
4094   CLEANUP multiple times, and have the correct semantics.  This
4095   happens in exception handling, for gotos, returns, breaks that
4096   leave the current scope.
4097
4098   If CLEANUP is nonzero and DECL is zero, we record a cleanup
4099   that is not associated with any particular variable.  */
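
/* For instance (an illustrative C++ sketch): on seeing `{ T x; ... }' the
   front end calls expand_decl_cleanup with DECL for x and CLEANUP a call
   to x's destructor, so the destructor is expanded on every path out of
   the contour: normal exit, goto, return, or exception.  */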
4100
4101int
4102expand_decl_cleanup (decl, cleanup)
4103     tree decl, cleanup;
4104{
4105  struct nesting *thisblock;
4106
4107  /* Error if we are not in any block.  */
4108  if (cfun == 0 || block_stack == 0)
4109    return 0;
4110
4111  thisblock = block_stack;
4112
4113  /* Record the cleanup if there is one.  */
4114
4115  if (cleanup != 0)
4116    {
4117      tree t;
4118      rtx seq;
4119      tree *cleanups = &thisblock->data.block.cleanups;
4120      int cond_context = conditional_context ();
4121
4122      if (cond_context)
4123	{
4124	  rtx flag = gen_reg_rtx (word_mode);
4125	  rtx set_flag_0;
4126	  tree cond;
4127
4128	  start_sequence ();
4129	  emit_move_insn (flag, const0_rtx);
4130	  set_flag_0 = get_insns ();
4131	  end_sequence ();
4132
4133	  thisblock->data.block.last_unconditional_cleanup
4134	    = emit_insns_after (set_flag_0,
4135				thisblock->data.block.last_unconditional_cleanup);
4136
4137	  emit_move_insn (flag, const1_rtx);
4138
4139	  cond = build_decl (VAR_DECL, NULL_TREE, type_for_mode (word_mode, 1));
4140	  SET_DECL_RTL (cond, flag);
4141
4142	  /* Conditionalize the cleanup.  */
4143	  cleanup = build (COND_EXPR, void_type_node,
4144			   truthvalue_conversion (cond),
4145			   cleanup, integer_zero_node);
4146	  cleanup = fold (cleanup);
4147
4148	  cleanups = thisblock->data.block.cleanup_ptr;
4149	}
4150
4151      cleanup = unsave_expr (cleanup);
4152
4153      t = *cleanups = tree_cons (decl, cleanup, *cleanups);
4154
4155      if (! cond_context)
4156	/* If this block has a cleanup, it belongs in stack_block_stack.  */
4157	stack_block_stack = thisblock;
4158
4159      if (cond_context)
4160	{
4161	  start_sequence ();
4162	}
4163
4164      if (! using_eh_for_cleanups_p)
4165	TREE_ADDRESSABLE (t) = 1;
4166      else
4167	expand_eh_region_start ();
4168
4169      if (cond_context)
4170	{
4171	  seq = get_insns ();
4172	  end_sequence ();
4173	  if (seq)
4174	    thisblock->data.block.last_unconditional_cleanup
4175	      = emit_insns_after (seq,
4176				  thisblock->data.block.last_unconditional_cleanup);
4177	}
4178      else
4179	{
4180	  thisblock->data.block.last_unconditional_cleanup
4181	    = get_last_insn ();
4182	  /* When we insert instructions after the last unconditional cleanup,
4183	     we don't adjust last_insn.  That means that a later add_insn will
4184	     clobber the instructions we've just added.  The easiest way to
4185	     fix this is to just insert another instruction here, so that the
4186	     instructions inserted after the last unconditional cleanup are
4187	     never the last instruction.  */
4188	  emit_note (NULL, NOTE_INSN_DELETED);
4189	  thisblock->data.block.cleanup_ptr = &thisblock->data.block.cleanups;
4190	}
4191    }
4192  return 1;
4193}
4194
4195/* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
4196   is thrown.  */
4197
4198int
4199expand_decl_cleanup_eh (decl, cleanup, eh_only)
4200     tree decl, cleanup;
4201     int eh_only;
4202{
4203  int ret = expand_decl_cleanup (decl, cleanup);
4204  if (cleanup && ret)
4205    {
4206      tree node = block_stack->data.block.cleanups;
4207      CLEANUP_EH_ONLY (node) = eh_only;
4208    }
4209  return ret;
4210}
4211
4212/* DECL is an anonymous union.  CLEANUP is a cleanup for DECL.
4213   DECL_ELTS is the list of elements that belong to DECL's type.
4214   In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup.  */
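
/* For example (illustrative), for the C++ anonymous union

       union { int i; double d; };

   DECL is the compiler-generated union object and DECL_ELTS lists `i'
   and `d', which below are given RTL sharing DECL's storage.  */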
4215
4216void
4217expand_anon_union_decl (decl, cleanup, decl_elts)
4218     tree decl, cleanup, decl_elts;
4219{
4220  struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
4221  rtx x;
4222  tree t;
4223
4224  /* If any of the elements are addressable, so is the entire union.  */
4225  for (t = decl_elts; t; t = TREE_CHAIN (t))
4226    if (TREE_ADDRESSABLE (TREE_VALUE (t)))
4227      {
4228	TREE_ADDRESSABLE (decl) = 1;
4229	break;
4230      }
4231
4232  expand_decl (decl);
4233  expand_decl_cleanup (decl, cleanup);
4234  x = DECL_RTL (decl);
4235
4236  /* Go through the elements, assigning RTL to each.  */
4237  for (t = decl_elts; t; t = TREE_CHAIN (t))
4238    {
4239      tree decl_elt = TREE_VALUE (t);
4240      tree cleanup_elt = TREE_PURPOSE (t);
4241      enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
4242
4243      /* If any of the elements are used, so is the entire union.  */
4245      if (TREE_USED (decl_elt))
4246	TREE_USED (decl) = 1;
4247
4248      /* Propagate the union's alignment to the elements.  */
4249      DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
4250      DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
4251
4252      /* If the element has BLKmode and the union doesn't, the union is
4253         aligned such that the element doesn't need to have BLKmode, so
4254         change the element's mode to the appropriate one for its size.  */
4255      if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
4256	DECL_MODE (decl_elt) = mode
4257	  = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
4258
4259      /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
4260         instead create a new MEM rtx with the proper mode.  */
4261      if (GET_CODE (x) == MEM)
4262	{
4263	  if (mode == GET_MODE (x))
4264	    SET_DECL_RTL (decl_elt, x);
4265	  else
4266	    SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
4267	}
4268      else if (GET_CODE (x) == REG)
4269	{
4270	  if (mode == GET_MODE (x))
4271	    SET_DECL_RTL (decl_elt, x);
4272	  else
4273	    SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
4274	}
4275      else
4276	abort ();
4277
4278      /* Record the cleanup if there is one.  */
4279
4280      if (cleanup != 0)
4281	thisblock->data.block.cleanups
4282	  = tree_cons (decl_elt, cleanup_elt,
4283		       thisblock->data.block.cleanups);
4284    }
4285}
4286
4287/* Expand a list of cleanups LIST.
4288   Elements may be expressions or may be nested lists.
4289
4290   If DONT_DO is nonnull, then any list-element
4291   whose TREE_PURPOSE matches DONT_DO is omitted.
4292   This is sometimes used to avoid a cleanup associated with
4293   a value that is being returned out of the scope.
4294
4295   If IN_FIXUP is non-zero, we are generating this cleanup for a fixup
4296   goto and handle protection regions specially in that case.
4297
4298   If REACHABLE, we emit code, otherwise just inform the exception handling
4299   code about this finalization.  */
4300
4301static void
4302expand_cleanups (list, dont_do, in_fixup, reachable)
4303     tree list;
4304     tree dont_do;
4305     int in_fixup;
4306     int reachable;
4307{
4308  tree tail;
4309  for (tail = list; tail; tail = TREE_CHAIN (tail))
4310    if (dont_do == 0 || TREE_PURPOSE (tail) != dont_do)
4311      {
4312	if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4313	  expand_cleanups (TREE_VALUE (tail), dont_do, in_fixup, reachable);
4314	else
4315	  {
4316	    if (! in_fixup && using_eh_for_cleanups_p)
4317	      expand_eh_region_end_cleanup (TREE_VALUE (tail));
4318
4319	    if (reachable && !CLEANUP_EH_ONLY (tail))
4320	      {
4321		/* Cleanups may be run multiple times.  For example,
4322		   when exiting a binding contour, we expand the
4323		   cleanups associated with that contour.  When a goto
4324		   within that binding contour has a target outside that
4325		   contour, it will expand all cleanups from its scope to
4326		   the target.  Though the cleanups are expanded multiple
4327		   times, the control paths are non-overlapping so the
4328		   cleanups will not be executed twice.  */
4329
4330		/* We may need to protect from outer cleanups.  */
4331		if (in_fixup && using_eh_for_cleanups_p)
4332		  {
4333		    expand_eh_region_start ();
4334
4335		    expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4336
4337		    expand_eh_region_end_fixup (TREE_VALUE (tail));
4338		  }
4339		else
4340		  expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);
4341
4342		free_temp_slots ();
4343	      }
4344	  }
4345      }
4346}
4347
4348/* Mark the context we are emitting RTL for as a conditional
4349   context, so that any cleanup actions we register with
4350   expand_decl_cleanup will be properly conditionalized when those
4351   cleanup actions are later performed.  Must be called before any
4352   expression (tree) is expanded that is within a conditional context.  */
4353
4354void
4355start_cleanup_deferral ()
4356{
4357  /* block_stack can be NULL if we are inside the parameter list.  It is
4358     OK to do nothing, because cleanups aren't possible here.  */
4359  if (block_stack)
4360    ++block_stack->data.block.conditional_code;
4361}
4362
4363/* Mark the end of a conditional region of code.  Because cleanup
4364   deferrals may be nested, we may still be in a conditional region
4365   after we end the currently deferred cleanups; only after we end all
4366   deferred cleanups are we back in unconditional code.  */
4367
4368void
4369end_cleanup_deferral ()
4370{
4371  /* block_stack can be NULL if we are inside the parameter list.  It is
4372     OK to do nothing, because cleanups aren't possible here.  */
4373  if (block_stack)
4374    --block_stack->data.block.conditional_code;
4375}
4376
4377/* Move all cleanups from the current block_stack
4378   to the containing block_stack, where they are assumed to
4379   have been created.  If anything can cause a temporary to
4380   be created, but not expanded for more than one level of
4381   block_stacks, then this code will have to change.  */
4382
4383void
4384move_cleanups_up ()
4385{
4386  struct nesting *block = block_stack;
4387  struct nesting *outer = block->next;
4388
4389  outer->data.block.cleanups
4390    = chainon (block->data.block.cleanups,
4391	       outer->data.block.cleanups);
4392  block->data.block.cleanups = 0;
4393}
4394
4395tree
4396last_cleanup_this_contour ()
4397{
4398  if (block_stack == 0)
4399    return 0;
4400
4401  return block_stack->data.block.cleanups;
4402}
4403
4404/* Return 1 if there are any pending cleanups at this point.
4405   If THIS_CONTOUR is nonzero, check the current contour as well.
4406   Otherwise, look only at the contours that enclose this one.  */
4407
4408int
4409any_pending_cleanups (this_contour)
4410     int this_contour;
4411{
4412  struct nesting *block;
4413
4414  if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
4415    return 0;
4416
4417  if (this_contour && block_stack->data.block.cleanups != NULL)
4418    return 1;
4419  if (block_stack->data.block.cleanups == 0
4420      && block_stack->data.block.outer_cleanups == 0)
4421    return 0;
4422
4423  for (block = block_stack->next; block; block = block->next)
4424    if (block->data.block.cleanups != 0)
4425      return 1;
4426
4427  return 0;
4428}
4429
4430/* Enter a case (Pascal) or switch (C) statement.
4431   Push a block onto case_stack and nesting_stack
4432   to accumulate the case-labels that are seen
4433   and to record the labels generated for the statement.
4434
4435   EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
4436   Otherwise, this construct is transparent for `exit_something'.
4437
4438   EXPR is the index-expression to be dispatched on.
4439   TYPE is its nominal type.  We could simply convert EXPR to this type,
4440   but instead we take short cuts.  */
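
/* An illustrative sequence for expanding `switch (expr)' (a sketch,
   assuming the usual front-end entry points):

       expand_start_case (1, expr, type, "switch statement");
       ... pushcase () / pushcase_range () for each label ...
       expand_end_case (expr);

   The PRINTNAME, here "switch statement", is used in diagnostics such
   as the one in check_seenlabel below.  */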
4441
4442void
4443expand_start_case (exit_flag, expr, type, printname)
4444     int exit_flag;
4445     tree expr;
4446     tree type;
4447     const char *printname;
4448{
4449  struct nesting *thiscase = ALLOC_NESTING ();
4450
4451  /* Make an entry on case_stack for the case we are entering.  */
4452
4453  thiscase->next = case_stack;
4454  thiscase->all = nesting_stack;
4455  thiscase->depth = ++nesting_depth;
4456  thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
4457  thiscase->data.case_stmt.case_list = 0;
4458  thiscase->data.case_stmt.index_expr = expr;
4459  thiscase->data.case_stmt.nominal_type = type;
4460  thiscase->data.case_stmt.default_label = 0;
4461  thiscase->data.case_stmt.printname = printname;
4462  thiscase->data.case_stmt.line_number_status = force_line_numbers ();
4463  case_stack = thiscase;
4464  nesting_stack = thiscase;
4465
4466  do_pending_stack_adjust ();
4467
4468  /* Make sure case_stmt.start points to something that won't
4469     need any transformation before expand_end_case.  */
4470  if (GET_CODE (get_last_insn ()) != NOTE)
4471    emit_note (NULL, NOTE_INSN_DELETED);
4472
4473  thiscase->data.case_stmt.start = get_last_insn ();
4474
4475  start_cleanup_deferral ();
4476}
4477
4478/* Start a "dummy case statement" within which case labels are invalid
4479   and are not connected to any larger real case statement.
4480   This can be used if you don't want to let a case statement jump
4481   into the middle of certain kinds of constructs.  */
4482
4483void
4484expand_start_case_dummy ()
4485{
4486  struct nesting *thiscase = ALLOC_NESTING ();
4487
4488  /* Make an entry on case_stack for the dummy.  */
4489
4490  thiscase->next = case_stack;
4491  thiscase->all = nesting_stack;
4492  thiscase->depth = ++nesting_depth;
4493  thiscase->exit_label = 0;
4494  thiscase->data.case_stmt.case_list = 0;
4495  thiscase->data.case_stmt.start = 0;
4496  thiscase->data.case_stmt.nominal_type = 0;
4497  thiscase->data.case_stmt.default_label = 0;
4498  case_stack = thiscase;
4499  nesting_stack = thiscase;
4500  start_cleanup_deferral ();
4501}
4502
4503/* End a dummy case statement.  */
4504
4505void
4506expand_end_case_dummy ()
4507{
4508  end_cleanup_deferral ();
4509  POPSTACK (case_stack);
4510}
4511
4512/* Return the data type of the index-expression
4513   of the innermost case statement, or null if none.  */
4514
4515tree
4516case_index_expr_type ()
4517{
4518  if (case_stack)
4519    return TREE_TYPE (case_stack->data.case_stmt.index_expr);
4520  return 0;
4521}
4522
4523static void
4524check_seenlabel ()
4525{
4526  /* If this is the first label, warn if any insns have been emitted.  */
4527  if (case_stack->data.case_stmt.line_number_status >= 0)
4528    {
4529      rtx insn;
4530
4531      restore_line_number_status
4532	(case_stack->data.case_stmt.line_number_status);
4533      case_stack->data.case_stmt.line_number_status = -1;
4534
4535      for (insn = case_stack->data.case_stmt.start;
4536	   insn;
4537	   insn = NEXT_INSN (insn))
4538	{
4539	  if (GET_CODE (insn) == CODE_LABEL)
4540	    break;
4541	  if (GET_CODE (insn) != NOTE
4542	      && (GET_CODE (insn) != INSN || GET_CODE (PATTERN (insn)) != USE))
4543	    {
4544	      do
4545		insn = PREV_INSN (insn);
4546	      while (insn && (GET_CODE (insn) != NOTE || NOTE_LINE_NUMBER (insn) < 0));
4547
4548	      /* If insn is zero, then there must have been a syntax error.  */
4549	      if (insn)
4550		warning_with_file_and_line (NOTE_SOURCE_FILE (insn),
4551					    NOTE_LINE_NUMBER (insn),
4552					    "unreachable code at beginning of %s",
4553					    case_stack->data.case_stmt.printname);
4554	      break;
4555	    }
4556	}
4557    }
4558}
4559
4560/* Accumulate one case or default label inside a case or switch statement.
4561   VALUE is the value of the case (a null pointer, for a default label).
4562   The function CONVERTER, when applied to arguments T and V,
4563   converts the value V to the type T.
4564
4565   If not currently inside a case or switch statement, return 1 and do
4566   nothing.  The caller will print a language-specific error message.
4567   If VALUE is a duplicate or overlaps, return 2 and do nothing
4568   except store the (first) duplicate node in *DUPLICATE.
4569   If VALUE is out of range, return 3 and do nothing.
4570   If we are jumping into the scope of a cleanup or var-sized array, return 5.
4571   Return 0 on success.
4572
4573   Extended to handle range statements.  */
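
/* Illustrative use by a front end (a sketch; the diagnostics are
   invented here):

       tree duplicate;
       switch (pushcase (value, convert, label, &duplicate))
	 {
	 case 1: error ("case label not within a switch statement"); break;
	 case 2: error ("duplicate case value"); break;
	 case 3: error ("case value out of range"); break;
	 }

   using the front end's `convert' routine as CONVERTER.  */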
4574
4575int
4576pushcase (value, converter, label, duplicate)
4577     tree value;
4578     tree (*converter) PARAMS ((tree, tree));
4579     tree label;
4580     tree *duplicate;
4581{
4582  tree index_type;
4583  tree nominal_type;
4584
4585  /* Fail if not inside a real case statement.  */
4586  if (! (case_stack && case_stack->data.case_stmt.start))
4587    return 1;
4588
4589  if (stack_block_stack
4590      && stack_block_stack->depth > case_stack->depth)
4591    return 5;
4592
4593  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4594  nominal_type = case_stack->data.case_stmt.nominal_type;
4595
4596  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
4597  if (index_type == error_mark_node)
4598    return 0;
4599
4600  /* Convert VALUE to the type in which the comparisons are nominally done.  */
4601  if (value != 0)
4602    value = (*converter) (nominal_type, value);
4603
4604  check_seenlabel ();
4605
4606  /* Fail if this value is out of range for the actual type of the index
4607     (which may be narrower than NOMINAL_TYPE).  */
4608  if (value != 0
4609      && (TREE_CONSTANT_OVERFLOW (value)
4610	  || ! int_fits_type_p (value, index_type)))
4611    return 3;
4612
4613  return add_case_node (value, value, label, duplicate);
4614}
4615
4616/* Like pushcase but this case applies to all values between VALUE1 and
4617   VALUE2 (inclusive).  If VALUE1 is NULL, the range starts at the lowest
4618   value of the index type and ends at VALUE2.  If VALUE2 is NULL, the range
4619   starts at VALUE1 and ends at the highest value of the index type.
4620   If both are NULL, this case applies to all values.
4621
4622   The return value is the same as that of pushcase but there is one
4623   additional error code: 4 means the specified range was empty.  */
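
/* This entry point supports, for example, the GNU C case-range extension
   `case LOW ... HIGH:', for which the front end supplies both bounds.  */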
4624
4625int
4626pushcase_range (value1, value2, converter, label, duplicate)
4627     tree value1, value2;
4628     tree (*converter) PARAMS ((tree, tree));
4629     tree label;
4630     tree *duplicate;
4631{
4632  tree index_type;
4633  tree nominal_type;
4634
4635  /* Fail if not inside a real case statement.  */
4636  if (! (case_stack && case_stack->data.case_stmt.start))
4637    return 1;
4638
4639  if (stack_block_stack
4640      && stack_block_stack->depth > case_stack->depth)
4641    return 5;
4642
4643  index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
4644  nominal_type = case_stack->data.case_stmt.nominal_type;
4645
4646  /* If the index is erroneous, avoid more problems: pretend to succeed.  */
4647  if (index_type == error_mark_node)
4648    return 0;
4649
4650  check_seenlabel ();
4651
4652  /* Convert VALUEs to type in which the comparisons are nominally done
4653     and replace any unspecified value with the corresponding bound.  */
4654  if (value1 == 0)
4655    value1 = TYPE_MIN_VALUE (index_type);
4656  if (value2 == 0)
4657    value2 = TYPE_MAX_VALUE (index_type);
4658
4659  /* Fail if the range is empty.  Do this before any conversion since
4660     we want to allow out-of-range empty ranges.  */
4661  if (value2 != 0 && tree_int_cst_lt (value2, value1))
4662    return 4;
4663
4664  /* If the max was unbounded, use the max of the nominal_type we are
4665     converting to.  Do this after the < check above to suppress false
4666     positives.  */
4667  if (value2 == 0)
4668    value2 = TYPE_MAX_VALUE (nominal_type);
4669
4670  value1 = (*converter) (nominal_type, value1);
4671  value2 = (*converter) (nominal_type, value2);
4672
4673  /* Fail if these values are out of range.  */
4674  if (TREE_CONSTANT_OVERFLOW (value1)
4675      || ! int_fits_type_p (value1, index_type))
4676    return 3;
4677
4678  if (TREE_CONSTANT_OVERFLOW (value2)
4679      || ! int_fits_type_p (value2, index_type))
4680    return 3;
4681
4682  return add_case_node (value1, value2, label, duplicate);
4683}
4684
4685/* Do the actual insertion of a case label for pushcase and pushcase_range
4686   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
4687   slowdown for large switch statements.  */
4688
4689int
4690add_case_node (low, high, label, duplicate)
4691     tree low, high;
4692     tree label;
4693     tree *duplicate;
4694{
4695  struct case_node *p, **q, *r;
4696
4697  /* If there's no HIGH value, then this is not a case range; it's
4698     just a simple case label.  But that's just a degenerate case
4699     range.  */
4700  if (!high)
4701    high = low;
4702
4703  /* Handle default labels specially.  */
4704  if (!high && !low)
4705    {
4706      if (case_stack->data.case_stmt.default_label != 0)
4707	{
4708	  *duplicate = case_stack->data.case_stmt.default_label;
4709	  return 2;
4710	}
4711      case_stack->data.case_stmt.default_label = label;
4712      expand_label (label);
4713      return 0;
4714    }
4715
4716  q = &case_stack->data.case_stmt.case_list;
4717  p = *q;
4718
4719  while ((r = *q))
4720    {
4721      p = r;
4722
4723      /* Keep going past elements distinctly greater than HIGH.  */
4724      if (tree_int_cst_lt (high, p->low))
4725	q = &p->left;
4726
4727      /* or distinctly less than LOW.  */
4728      else if (tree_int_cst_lt (p->high, low))
4729	q = &p->right;
4730
4731      else
4732	{
4733	  /* We have an overlap; this is an error.  */
4734	  *duplicate = p->code_label;
4735	  return 2;
4736	}
4737    }
4738
4739  /* Add this label to the chain, and succeed.  */
4740
4741  r = (struct case_node *) xmalloc (sizeof (struct case_node));
4742  r->low = low;
4743
4744  /* If the bounds are equal, turn this into the one-value case.  */
4745  if (tree_int_cst_equal (low, high))
4746    r->high = r->low;
4747  else
4748    r->high = high;
4749
4750  r->code_label = label;
4751  expand_label (label);
4752
4753  *q = r;
4754  r->parent = p;
4755  r->left = 0;
4756  r->right = 0;
4757  r->balance = 0;
4758
4759  while (p)
4760    {
4761      struct case_node *s;
4762
4763      if (r == p->left)
4764	{
4765	  int b;
4766
4767	  if (! (b = p->balance))
4768	    /* Growth propagation from left side.  */
4769	    p->balance = -1;
4770	  else if (b < 0)
4771	    {
4772	      if (r->balance < 0)
4773		{
4774		  /* R-Rotation */
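		  /* Sketch (A, B, C are subtrees; the mirror-image
		     L-Rotation appears in the right-growth branch
		     below):

			    P                 R
			   / \               / \
			  R   C    ==>      A   P
			 / \                   / \
			A   B                 B   C   */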
4775		  if ((p->left = s = r->right))
4776		    s->parent = p;
4777
4778		  r->right = p;
4779		  p->balance = 0;
4780		  r->balance = 0;
4781		  s = p->parent;
4782		  p->parent = r;
4783
4784		  if ((r->parent = s))
4785		    {
4786		      if (s->left == p)
4787			s->left = r;
4788		      else
4789			s->right = r;
4790		    }
4791		  else
4792		    case_stack->data.case_stmt.case_list = r;
4793		}
4794	      else
4795		/* r->balance == +1 */
4796		{
4797		  /* LR-Rotation */
4798
4799		  int b2;
4800		  struct case_node *t = r->right;
4801
4802		  if ((p->left = s = t->right))
4803		    s->parent = p;
4804
4805		  t->right = p;
4806		  if ((r->right = s = t->left))
4807		    s->parent = r;
4808
4809		  t->left = r;
4810		  b = t->balance;
4811		  b2 = b < 0;
4812		  p->balance = b2;
4813		  b2 = -b2 - b;
4814		  r->balance = b2;
4815		  t->balance = 0;
4816		  s = p->parent;
4817		  p->parent = t;
4818		  r->parent = t;
4819
4820		  if ((t->parent = s))
4821		    {
4822		      if (s->left == p)
4823			s->left = t;
4824		      else
4825			s->right = t;
4826		    }
4827		  else
4828		    case_stack->data.case_stmt.case_list = t;
4829		}
4830	      break;
4831	    }
4832
4833	  else
4834	    {
4835	      /* p->balance == +1; growth of left side balances the node.  */
4836	      p->balance = 0;
4837	      break;
4838	    }
4839	}
4840      else
4841	/* r == p->right */
4842	{
4843	  int b;
4844
4845	  if (! (b = p->balance))
4846	    /* Growth propagation from right side.  */
4847	    p->balance++;
4848	  else if (b > 0)
4849	    {
4850	      if (r->balance > 0)
4851		{
4852		  /* L-Rotation */
4853
4854		  if ((p->right = s = r->left))
4855		    s->parent = p;
4856
4857		  r->left = p;
4858		  p->balance = 0;
4859		  r->balance = 0;
4860		  s = p->parent;
4861		  p->parent = r;
4862		  if ((r->parent = s))
4863		    {
4864		      if (s->left == p)
4865			s->left = r;
4866		      else
4867			s->right = r;
4868		    }
4869
4870		  else
4871		    case_stack->data.case_stmt.case_list = r;
4872		}
4873
4874	      else
4875		/* r->balance == -1 */
4876		{
4877		  /* RL-Rotation */
4878		  int b2;
4879		  struct case_node *t = r->left;
4880
4881		  if ((p->right = s = t->left))
4882		    s->parent = p;
4883
4884		  t->left = p;
4885
4886		  if ((r->left = s = t->right))
4887		    s->parent = r;
4888
4889		  t->right = r;
4890		  b = t->balance;
4891		  b2 = b < 0;
4892		  r->balance = b2;
4893		  b2 = -b2 - b;
4894		  p->balance = b2;
4895		  t->balance = 0;
4896		  s = p->parent;
4897		  p->parent = t;
4898		  r->parent = t;
4899
4900		  if ((t->parent = s))
4901		    {
4902		      if (s->left == p)
4903			s->left = t;
4904		      else
4905			s->right = t;
4906		    }
4907
4908		  else
4909		    case_stack->data.case_stmt.case_list = t;
4910		}
4911	      break;
4912	    }
4913	  else
4914	    {
4915	      /* p->balance == -1; growth of right side balances the node.  */
4916	      p->balance = 0;
4917	      break;
4918	    }
4919	}
4920
4921      r = p;
4922      p = p->parent;
4923    }
4924
4925  return 0;
4926}
4927
4928/* Returns the number of possible values of TYPE.
4929   Returns -1 if the number is unknown, variable, or if the number does not
4930   fit in a HOST_WIDE_INT.
4931   Sets *SPARSENESS to 2 if TYPE is an ENUMERAL_TYPE whose values
4932   do not increase monotonically (there may be duplicates);
4933   to 1 if the values increase monotonically, but not always by 1;
4934   otherwise sets it to 0.  */
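
/* For example, an integral index type with TYPE_MIN_VALUE 1 and
   TYPE_MAX_VALUE 10 yields max - min + 1 = 10 possible values, with
   *SPARSENESS left 0.  */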
4935
4936HOST_WIDE_INT
4937all_cases_count (type, sparseness)
4938     tree type;
4939     int *sparseness;
4940{
4941  tree t;
4942  HOST_WIDE_INT count, minval, lastval;
4943
4944  *sparseness = 0;
4945
4946  switch (TREE_CODE (type))
4947    {
4948    case BOOLEAN_TYPE:
4949      count = 2;
4950      break;
4951
4952    case CHAR_TYPE:
4953      count = 1 << BITS_PER_UNIT;
4954      break;
4955
4956    default:
4957    case INTEGER_TYPE:
4958      if (TYPE_MAX_VALUE (type) != 0
4959	  && 0 != (t = fold (build (MINUS_EXPR, type, TYPE_MAX_VALUE (type),
4960				    TYPE_MIN_VALUE (type))))
4961	  && 0 != (t = fold (build (PLUS_EXPR, type, t,
4962				    convert (type, integer_one_node))))
4963	  && host_integerp (t, 1))
4964	count = tree_low_cst (t, 1);
4965      else
4966	return -1;
4967      break;
4968
4969    case ENUMERAL_TYPE:
4970      /* Don't waste time with enumeral types with huge values.  */
4971      if (! host_integerp (TYPE_MIN_VALUE (type), 0)
4972	  || TYPE_MAX_VALUE (type) == 0
4973	  || ! host_integerp (TYPE_MAX_VALUE (type), 0))
4974	return -1;
4975
4976      lastval = minval = tree_low_cst (TYPE_MIN_VALUE (type), 0);
4977      count = 0;
4978
4979      for (t = TYPE_VALUES (type); t != NULL_TREE; t = TREE_CHAIN (t))
4980	{
4981	  HOST_WIDE_INT thisval = tree_low_cst (TREE_VALUE (t), 0);
4982
4983	  if (*sparseness == 2 || thisval <= lastval)
4984	    *sparseness = 2;
4985	  else if (thisval != minval + count)
4986	    *sparseness = 1;
4987
4988	  lastval = thisval;
4989	  count++;
4990	}
4991    }
4992
4993  return count;
4994}
4995
4996#define BITARRAY_TEST(ARRAY, INDEX) \
4997  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
4998			  & (1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR)))
4999#define BITARRAY_SET(ARRAY, INDEX) \
5000  ((ARRAY)[(unsigned) (INDEX) / HOST_BITS_PER_CHAR]\
5001			  |= 1 << ((unsigned) (INDEX) % HOST_BITS_PER_CHAR))
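
/* For example, with HOST_BITS_PER_CHAR == 8, BITARRAY_SET (ARRAY, 11)
   sets bit 3 of ARRAY[1], and BITARRAY_TEST (ARRAY, 11) is then
   nonzero.  */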
5002
5003/* Set the elements of the bitstring CASES_SEEN (which has length COUNT),
5004   with the case values we have seen, assuming the case expression
5005   has the given TYPE.
5006   SPARSENESS is as determined by all_cases_count.
5007
5008   The time needed is proportional to COUNT, unless
5009   SPARSENESS is 2, in which case quadratic time is needed.  */
5010
5011void
5012mark_seen_cases (type, cases_seen, count, sparseness)
5013     tree type;
5014     unsigned char *cases_seen;
5015     HOST_WIDE_INT count;
5016     int sparseness;
5017{
5018  tree next_node_to_try = NULL_TREE;
5019  HOST_WIDE_INT next_node_offset = 0;
5020
5021  struct case_node *n, *root = case_stack->data.case_stmt.case_list;
5022  tree val = make_node (INTEGER_CST);
5023
5024  TREE_TYPE (val) = type;
5025  if (! root)
5026    /* Do nothing.  */
5027    ;
5028  else if (sparseness == 2)
5029    {
5030      tree t;
5031      unsigned HOST_WIDE_INT xlo;
5032
5033      /* This less efficient loop is only needed to handle
5034	 duplicate case values (multiple enum constants
5035	 with the same value).  */
5036      TREE_TYPE (val) = TREE_TYPE (root->low);
5037      for (t = TYPE_VALUES (type), xlo = 0; t != NULL_TREE;
5038	   t = TREE_CHAIN (t), xlo++)
5039	{
5040	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (TREE_VALUE (t));
5041	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (TREE_VALUE (t));
5042	  n = root;
5043	  do
5044	    {
5045	      /* Keep going past elements distinctly greater than VAL.  */
5046	      if (tree_int_cst_lt (val, n->low))
5047		n = n->left;
5048
5049	      /* or distinctly less than VAL.  */
5050	      else if (tree_int_cst_lt (n->high, val))
5051		n = n->right;
5052
5053	      else
5054		{
5055		  /* We have found a matching range.  */
5056		  BITARRAY_SET (cases_seen, xlo);
5057		  break;
5058		}
5059	    }
5060	  while (n);
5061	}
5062    }
5063  else
5064    {
5065      if (root->left)
5066	case_stack->data.case_stmt.case_list = root = case_tree2list (root, 0);
5067
5068      for (n = root; n; n = n->right)
5069	{
5070	  TREE_INT_CST_LOW (val) = TREE_INT_CST_LOW (n->low);
5071	  TREE_INT_CST_HIGH (val) = TREE_INT_CST_HIGH (n->low);
5072	  while (! tree_int_cst_lt (n->high, val))
5073	    {
5074	      /* Calculate (into xlo) the "offset" of the integer (val).
5075		 The element with lowest value has offset 0, the next smallest
5076		 element has offset 1, etc.  */
5077
5078	      unsigned HOST_WIDE_INT xlo;
5079	      HOST_WIDE_INT xhi;
5080	      tree t;
5081
5082	      if (sparseness && TYPE_VALUES (type) != NULL_TREE)
5083		{
5084		  /* The TYPE_VALUES will be in increasing order, so
5085	     start searching where we last ended.  */
5086		  t = next_node_to_try;
5087		  xlo = next_node_offset;
5088		  xhi = 0;
5089		  for (;;)
5090		    {
5091		      if (t == NULL_TREE)
5092			{
5093			  t = TYPE_VALUES (type);
5094			  xlo = 0;
5095			}
5096		      if (tree_int_cst_equal (val, TREE_VALUE (t)))
5097			{
5098			  next_node_to_try = TREE_CHAIN (t);
5099			  next_node_offset = xlo + 1;
5100			  break;
5101			}
5102		      xlo++;
5103		      t = TREE_CHAIN (t);
5104		      if (t == next_node_to_try)
5105			{
5106			  xlo = -1;
5107			  break;
5108			}
5109		    }
5110		}
5111	      else
5112		{
5113		  t = TYPE_MIN_VALUE (type);
5114		  if (t)
5115		    neg_double (TREE_INT_CST_LOW (t), TREE_INT_CST_HIGH (t),
5116				&xlo, &xhi);
5117		  else
5118		    xlo = xhi = 0;
5119		  add_double (xlo, xhi,
5120			      TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5121			      &xlo, &xhi);
5122		}
5123
5124	      if (xhi == 0 && xlo < (unsigned HOST_WIDE_INT) count)
5125		BITARRAY_SET (cases_seen, xlo);
5126
5127	      add_double (TREE_INT_CST_LOW (val), TREE_INT_CST_HIGH (val),
5128			  1, 0,
5129			  &TREE_INT_CST_LOW (val), &TREE_INT_CST_HIGH (val));
5130	    }
5131	}
5132    }
5133}
5134
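/* For example, given a hypothetical `enum { P = -1, Q = 0, R = 1 }'
   with case labels for Q and R only, the offsets relative to the
   minimum value P are 1 and 2, so mark_seen_cases sets bits 1 and 2
   of CASES_SEEN and leaves bit 0 (for P) clear.  */
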
5135/* Called when the index of a switch statement is an enumerated type
5136   and there is no default label.
5137
5138   Checks that all enumeration literals are covered by the case
5139   expressions of a switch.  Also, warn if there are any extra
5140   switch cases that are *not* elements of the enumerated type.
5141
5142   If all enumeration literals were covered by the case expressions,
5143   turn one of the expressions into the default expression since it should
5144   not be possible to fall through such a switch.  */
5145
5146void
5147check_for_full_enumeration_handling (type)
5148     tree type;
5149{
5150  struct case_node *n;
5151  tree chain;
5152
5153  /* Degree of sparseness of the selector type's values; set by all_cases_count.  */
5154  int sparseness = 0;
5155
5156  /* The number of possible selector values.  */
5157  HOST_WIDE_INT size;
5158
5159  /* For each possible selector value, a one iff it has been matched
5160     by a case value alternative.  */
5161  unsigned char *cases_seen;
5162
5163  /* The allocated size of cases_seen, in chars.  */
5164  HOST_WIDE_INT bytes_needed;
5165
5166  if (! warn_switch)
5167    return;
5168
5169  size = all_cases_count (type, &sparseness);
5170  bytes_needed = (size + HOST_BITS_PER_CHAR) / HOST_BITS_PER_CHAR;
5171
5172  if (size > 0 && size < 600000
5173      /* We deliberately use calloc here, not xcalloc, so that we can suppress
5174	 this optimization if we don't have enough memory rather than
5175	 aborting, as xmalloc would do.  */
5176      && (cases_seen =
5177	  (unsigned char *) really_call_calloc (bytes_needed, 1)) != NULL)
5178    {
5179      HOST_WIDE_INT i;
5180      tree v = TYPE_VALUES (type);
5181
5182      /* The time complexity of this code is normally O(N), where
5183	 N is the number of members in the enumerated type.
5184	 However, if type is an ENUMERAL_TYPE whose values do not
5185	 increase monotonically, O(N*log(N)) time may be needed.  */
5186
5187      mark_seen_cases (type, cases_seen, size, sparseness);
5188
5189      for (i = 0; v != NULL_TREE && i < size; i++, v = TREE_CHAIN (v))
5190	if (BITARRAY_TEST (cases_seen, i) == 0)
5191	  warning ("enumeration value `%s' not handled in switch",
5192		   IDENTIFIER_POINTER (TREE_PURPOSE (v)));
5193
5194      free (cases_seen);
5195    }
5196
5197  /* Now we go the other way around; we warn if there are case
5198     expressions that don't correspond to enumerators.  This can
5199     occur since C and C++ don't enforce type-checking of
5200     assignments to enumeration variables.  */
5201
5202  if (case_stack->data.case_stmt.case_list
5203      && case_stack->data.case_stmt.case_list->left)
5204    case_stack->data.case_stmt.case_list
5205      = case_tree2list (case_stack->data.case_stmt.case_list, 0);
5206  if (warn_switch)
5207    for (n = case_stack->data.case_stmt.case_list; n; n = n->right)
5208      {
5209	for (chain = TYPE_VALUES (type);
5210	     chain && !tree_int_cst_equal (n->low, TREE_VALUE (chain));
5211	     chain = TREE_CHAIN (chain))
5212	  ;
5213
5214	if (!chain)
5215	  {
5216	    if (TYPE_NAME (type) == 0)
5217	      warning ("case value `%ld' not in enumerated type",
5218		       (long) TREE_INT_CST_LOW (n->low));
5219	    else
5220	      warning ("case value `%ld' not in enumerated type `%s'",
5221		       (long) TREE_INT_CST_LOW (n->low),
5222		       IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5223					    == IDENTIFIER_NODE)
5224					   ? TYPE_NAME (type)
5225					   : DECL_NAME (TYPE_NAME (type))));
5226	  }
5227	if (!tree_int_cst_equal (n->low, n->high))
5228	  {
5229	    for (chain = TYPE_VALUES (type);
5230		 chain && !tree_int_cst_equal (n->high, TREE_VALUE (chain));
5231		 chain = TREE_CHAIN (chain))
5232	      ;
5233
5234	    if (!chain)
5235	      {
5236		if (TYPE_NAME (type) == 0)
5237		  warning ("case value `%ld' not in enumerated type",
5238			   (long) TREE_INT_CST_LOW (n->high));
5239		else
5240		  warning ("case value `%ld' not in enumerated type `%s'",
5241			   (long) TREE_INT_CST_LOW (n->high),
5242			   IDENTIFIER_POINTER ((TREE_CODE (TYPE_NAME (type))
5243						== IDENTIFIER_NODE)
5244					       ? TYPE_NAME (type)
5245					       : DECL_NAME (TYPE_NAME (type))));
5246	      }
5247	  }
5248      }
5249}
5250
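/* To illustrate both warnings with a hypothetical input: given

     enum color { RED, GREEN, BLUE };

   a switch over an `enum color' value with no default label and only

     case RED:  break;
     case 7:    break;

   draws warnings that `GREEN' and `BLUE' are not handled in the
   switch and that case value `7' is not in enumerated type `color'.  */
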
5251/* Free CN, and its children.  */
5252
5253static void
5254free_case_nodes (cn)
5255     case_node_ptr cn;
5256{
5257  if (cn)
5258    {
5259      free_case_nodes (cn->left);
5260      free_case_nodes (cn->right);
5261      free (cn);
5262    }
5263}
5264
5265
5266
5267/* Terminate a case (Pascal) or switch (C) statement
5268   in which ORIG_INDEX is the expression to be tested.
5269   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
5270   type as given in the source before any compiler conversions.
5271   Generate the code to test it and jump to the right place.  */
5272
5273void
5274expand_end_case_type (orig_index, orig_type)
5275     tree orig_index, orig_type;
5276{
5277  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
5278  rtx default_label = 0;
5279  struct case_node *n;
5280  unsigned int count;
5281  rtx index;
5282  rtx table_label;
5283  int ncases;
5284  rtx *labelvec;
5285  int i;
5286  rtx before_case, end;
5287  struct nesting *thiscase = case_stack;
5288  tree index_expr, index_type;
5289  int unsignedp;
5290
5291  /* Don't crash due to previous errors.  */
5292  if (thiscase == NULL)
5293    return;
5294
5295  table_label = gen_label_rtx ();
5296  index_expr = thiscase->data.case_stmt.index_expr;
5297  index_type = TREE_TYPE (index_expr);
5298  unsignedp = TREE_UNSIGNED (index_type);
5299  if (orig_type == NULL)
5300    orig_type = TREE_TYPE (orig_index);
5301
5302  do_pending_stack_adjust ();
5303
5304  /* This might get a spurious warning in the presence of a syntax error;
5305     it could be fixed by moving the call to check_seenlabel after the
5306     check for error_mark_node, and copying the code of check_seenlabel that
5307     deals with case_stack->data.case_stmt.line_number_status /
5308     restore_line_number_status in front of the call to end_cleanup_deferral.
5309     However, this might miss some useful warnings in the presence of
5310     non-syntax errors.  */
5311  check_seenlabel ();
5312
5313  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
5314  if (index_type != error_mark_node)
5315    {
5316      /* If switch expression was an enumerated type, check that all
5317	 enumeration literals are covered by the cases.
5318	 No sense trying this if there's a default case, however.  */
5319
5320      if (!thiscase->data.case_stmt.default_label
5321	  && TREE_CODE (orig_type) == ENUMERAL_TYPE
5322	  && TREE_CODE (index_expr) != INTEGER_CST)
5323	check_for_full_enumeration_handling (orig_type);
5324
5325      /* If we don't have a default-label, create one here,
5326	 after the body of the switch.  */
5327      if (thiscase->data.case_stmt.default_label == 0)
5328	{
5329	  thiscase->data.case_stmt.default_label
5330	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
5331	  expand_label (thiscase->data.case_stmt.default_label);
5332	}
5333      default_label = label_rtx (thiscase->data.case_stmt.default_label);
5334
5335      before_case = get_last_insn ();
5336
5337      if (thiscase->data.case_stmt.case_list
5338	  && thiscase->data.case_stmt.case_list->left)
5339	thiscase->data.case_stmt.case_list
5340	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);
5341
5342      /* Simplify the case-list before we count it.  */
5343      group_case_nodes (thiscase->data.case_stmt.case_list);
5344
5345      /* Get upper and lower bounds of case values.
5346	 Also convert all the case values to the index expr's data type.  */
5347
5348      count = 0;
5349      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5350	{
5351	  /* Check low and high label values are integers.  */
5352	  if (TREE_CODE (n->low) != INTEGER_CST)
5353	    abort ();
5354	  if (TREE_CODE (n->high) != INTEGER_CST)
5355	    abort ();
5356
5357	  n->low = convert (index_type, n->low);
5358	  n->high = convert (index_type, n->high);
5359
5360	  /* Count the elements and track the largest and smallest
5361	     of them (treating them as signed even if they are not).  */
5362	  if (count++ == 0)
5363	    {
5364	      minval = n->low;
5365	      maxval = n->high;
5366	    }
5367	  else
5368	    {
5369	      if (INT_CST_LT (n->low, minval))
5370		minval = n->low;
5371	      if (INT_CST_LT (maxval, n->high))
5372		maxval = n->high;
5373	    }
5374	  /* A range counts double, since it requires two compares.  */
5375	  if (! tree_int_cst_equal (n->low, n->high))
5376	    count++;
5377	}
5378
5379      /* Compute span of values.  */
5380      if (count != 0)
5381	range = fold (build (MINUS_EXPR, index_type, maxval, minval));
5382
5383      end_cleanup_deferral ();
5384
5385      if (count == 0)
5386	{
5387	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
5388	  emit_queue ();
5389	  emit_jump (default_label);
5390	}
5391
5392      /* If the range of values is much bigger than the number of values,
5393	 make a sequence of conditional branches instead of a dispatch.
5394	 If the switch-index is a constant, do it this way
5395	 because we can optimize it.  */
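
      /* For instance, with illustrative numbers: the four case values
	 1, 2, 3 and 1000 give a count of 4 but a range of 999; since
	 999 exceeds 10 * 4, a sequence of compare and jump insns is
	 emitted instead of a 1000-entry dispatch table.  */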
5396
5397      else if (count < case_values_threshold ()
5398	       || compare_tree_int (range, 10 * count) > 0
5399	       /* RANGE may be signed, and really large ranges will show up
5400		  as negative numbers.  */
5401	       || compare_tree_int (range, 0) < 0
5402#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
5403	       || flag_pic
5404#endif
5405	       || TREE_CODE (index_expr) == INTEGER_CST
5406	       || (TREE_CODE (index_expr) == COMPOUND_EXPR
5407		   && TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
5408	{
5409	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
5410
5411	  /* If the index is a short or char for which we do not have
5412	     an insn to handle comparisons directly, convert it to
5413	     a full integer now, rather than letting each comparison
5414	     generate the conversion.  */
5415
5416	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
5417	      && ! have_insn_for (COMPARE, GET_MODE (index)))
5418	    {
5419	      enum machine_mode wider_mode;
5420	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
5421		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
5422		if (have_insn_for (COMPARE, wider_mode))
5423		  {
5424		    index = convert_to_mode (wider_mode, index, unsignedp);
5425		    break;
5426		  }
5427	    }
5428
5429	  emit_queue ();
5430	  do_pending_stack_adjust ();
5431
5432	  index = protect_from_queue (index, 0);
5433	  if (GET_CODE (index) == MEM)
5434	    index = copy_to_reg (index);
5435	  if (GET_CODE (index) == CONST_INT
5436	      || TREE_CODE (index_expr) == INTEGER_CST)
5437	    {
5438	      /* Make a tree node with the proper constant value
5439		 if we don't already have one.  */
5440	      if (TREE_CODE (index_expr) != INTEGER_CST)
5441		{
5442		  index_expr
5443		    = build_int_2 (INTVAL (index),
5444				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
5445		  index_expr = convert (index_type, index_expr);
5446		}
5447
5448	      /* For constant index expressions we need only
5449		 issue an unconditional branch to the appropriate
5450		 target code.  The job of removing any unreachable
5451		 code is left to the optimization phase if the
5452		 "-O" option is specified.  */
5453	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5454		if (! tree_int_cst_lt (index_expr, n->low)
5455		    && ! tree_int_cst_lt (n->high, index_expr))
5456		  break;
5457
5458	      if (n)
5459		emit_jump (label_rtx (n->code_label));
5460	      else
5461		emit_jump (default_label);
5462	    }
5463	  else
5464	    {
5465	      /* If the index expression is not constant we generate
5466		 a binary decision tree to select the appropriate
5467		 target code.  This is done as follows:
5468
5469		 The list of cases is rearranged into a binary tree,
5470		 nearly optimal assuming equal probability for each case.
5471
5472		 The tree is transformed into RTL, eliminating
5473		 redundant test conditions at the same time.
5474
5475		 If program flow could reach the end of the
5476		 decision tree an unconditional jump to the
5477		 default code is emitted.  */
5478
5479	      use_cost_table
5480		= (TREE_CODE (orig_type) != ENUMERAL_TYPE
5481		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
5482	      balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
5483	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
5484			       default_label, index_type);
5485	      emit_jump_if_reachable (default_label);
5486	    }
5487	}
5488      else
5489	{
5490	  if (! try_casesi (index_type, index_expr, minval, range,
5491			    table_label, default_label))
5492	    {
5493	      index_type = thiscase->data.case_stmt.nominal_type;
5494
5495              /* Index jumptables from zero for suitable values of
5496                 minval to avoid a subtraction.  */
5497              if (! optimize_size
5498                  && compare_tree_int (minval, 0) > 0
5499                  && compare_tree_int (minval, 3) < 0)
5500                {
5501                  minval = integer_zero_node;
5502                  range = maxval;
5503                }
5504
5505	      if (! try_tablejump (index_type, index_expr, minval, range,
5506				   table_label, default_label))
5507		abort ();
5508	    }
5509
5510	  /* Get table of labels to jump to, in order of case index.  */
5511
5512	  ncases = tree_low_cst (range, 0) + 1;
5513	  labelvec = (rtx *) alloca (ncases * sizeof (rtx));
5514	  memset ((char *) labelvec, 0, ncases * sizeof (rtx));
5515
5516	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
5517	    {
5518	      /* Compute the low and high bounds relative to the minimum
5519		 value since that should fit in a HOST_WIDE_INT while the
5520		 actual values may not.  */
5521	      HOST_WIDE_INT i_low
5522		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
5523                                             n->low, minval)), 1);
5524	      HOST_WIDE_INT i_high
5525		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
5526                                             n->high, minval)), 1);
5527	      HOST_WIDE_INT i;
5528
5529	      for (i = i_low; i <= i_high; i ++)
5530		labelvec[i]
5531		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
5532	    }
5533
5534	  /* Fill in the gaps with the default.  */
5535	  for (i = 0; i < ncases; i++)
5536	    if (labelvec[i] == 0)
5537	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);
5538
5539	  /* Output the table.  */
5540	  emit_label (table_label);
5541
5542	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
5543	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
5544						   gen_rtx_LABEL_REF (Pmode, table_label),
5545						   gen_rtvec_v (ncases, labelvec),
5546						   const0_rtx, const0_rtx));
5547	  else
5548	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
5549					      gen_rtvec_v (ncases, labelvec)));
5550
5551	  /* If the case insn drops through the table,
5552	     after the table we must jump to the default-label.
5553	     Otherwise record no drop-through after the table.  */
5554#ifdef CASE_DROPS_THROUGH
5555	  emit_jump (default_label);
5556#else
5557	  emit_barrier ();
5558#endif
5559	}
5560
5561      before_case = NEXT_INSN (before_case);
5562      end = get_last_insn ();
5563      if (squeeze_notes (&before_case, &end))
5564	abort ();
5565      reorder_insns (before_case, end,
5566		     thiscase->data.case_stmt.start);
5567    }
5568  else
5569    end_cleanup_deferral ();
5570
5571  if (thiscase->exit_label)
5572    emit_label (thiscase->exit_label);
5573
5574  free_case_nodes (case_stack->data.case_stmt.case_list);
5575  POPSTACK (case_stack);
5576
5577  free_temp_slots ();
5578}
5579
5580/* Convert the tree NODE into a list linked by the right field, with the left
5581   field zeroed.  RIGHT is used for recursion; it is a list to be placed
5582   rightmost in the resulting list.  */
5583
5584static struct case_node *
5585case_tree2list (node, right)
5586     struct case_node *node, *right;
5587{
5588  struct case_node *left;
5589
5590  if (node->right)
5591    right = case_tree2list (node->right, right);
5592
5593  node->right = right;
5594  if ((left = node->left))
5595    {
5596      node->left = 0;
5597      return case_tree2list (left, node);
5598    }
5599
5600  return node;
5601}
5602
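/* E.g. (illustrative): a three-node tree with 2 at the root, 1 as its
   left child and 3 as its right child becomes the list 1 -> 2 -> 3,
   linked through the `right' fields with every `left' field zeroed.  */
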
5603/* Generate code to jump to LABEL if OP1 and OP2 are equal.  */
5604
5605static void
5606do_jump_if_equal (op1, op2, label, unsignedp)
5607     rtx op1, op2, label;
5608     int unsignedp;
5609{
5610  if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
5611    {
5612      if (INTVAL (op1) == INTVAL (op2))
5613	emit_jump (label);
5614    }
5615  else
5616    emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
5617			     (GET_MODE (op1) == VOIDmode
5618			     ? GET_MODE (op2) : GET_MODE (op1)),
5619			     unsignedp, label);
5620}
5621
5622/* Not all case values are encountered equally.  This function
5623   uses a heuristic to weight case labels, in cases where that
5624   looks like a reasonable thing to do.
5625
5626   Right now, all we try to guess is text, and we establish the
5627   following weights:
5628
5629	chars above space:	16
5630	digits:			16
5631	default:		12
5632	space, punct:		8
5633	tab:			4
5634	newline:		2
5635	other "\" chars:	1
5636	remaining chars:	0
5637
5638   If we find any cases in the switch that are not either -1 or in the range
5639   of valid ASCII characters, or are control characters other than those
5640   commonly used with "\", don't treat this switch as scanning text.
5641
5642   Return 1 if these nodes are suitable for cost estimation, otherwise
5643   return 0.  */
5644
5645static int
5646estimate_case_costs (node)
5647     case_node_ptr node;
5648{
5649  tree min_ascii = integer_minus_one_node;
5650  tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
5651  case_node_ptr n;
5652  int i;
5653
5654  /* If we haven't already made the cost table, make it now.  Note that the
5655     lower bound of the table is -1, not zero.  */
5656
5657  if (! cost_table_initialized)
5658    {
5659      cost_table_initialized = 1;
5660
5661      for (i = 0; i < 128; i++)
5662	{
5663	  if (ISALNUM (i))
5664	    COST_TABLE (i) = 16;
5665	  else if (ISPUNCT (i))
5666	    COST_TABLE (i) = 8;
5667	  else if (ISCNTRL (i))
5668	    COST_TABLE (i) = -1;
5669	}
5670
5671      COST_TABLE (' ') = 8;
5672      COST_TABLE ('\t') = 4;
5673      COST_TABLE ('\0') = 4;
5674      COST_TABLE ('\n') = 2;
5675      COST_TABLE ('\f') = 1;
5676      COST_TABLE ('\v') = 1;
5677      COST_TABLE ('\b') = 1;
5678    }
5679
5680  /* See if all the case expressions look like text.  It is text if the
5681     constant is >= -1 and the highest constant is <= 127.  Do all comparisons
5682     as signed arithmetic since we don't want to ever access cost_table with a
5683     value less than -1.  Also check that none of the constants in a range
5684     are strange control characters.  */
5685
5686  for (n = node; n; n = n->right)
5687    {
5688      if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
5689	return 0;
5690
5691      for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
5692	   i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
5693	if (COST_TABLE (i) < 0)
5694	  return 0;
5695    }
5696
5697  /* All interesting values are within the range of interesting
5698     ASCII characters.  */
5699  return 1;
5700}
5701
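/* For illustration: in a hypothetical

     switch (c) { case 'a': ...  case '0': ...  case '\n': ... }

   every case value lies in [-1, 127] and none is an unusual control
   character, so the function returns 1; the letter and the digit are
   weighted 16 each and the newline 2 when the tree is later balanced
   by cost.  */
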
5702/* Scan an ordered list of case nodes
5703   combining those with consecutive values or ranges.
5704
5705   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */
5706
5707static void
5708group_case_nodes (head)
5709     case_node_ptr head;
5710{
5711  case_node_ptr node = head;
5712
5713  while (node)
5714    {
5715      rtx lb = next_real_insn (label_rtx (node->code_label));
5716      rtx lb2;
5717      case_node_ptr np = node;
5718
5719      /* Try to group the successors of NODE with NODE.  */
5720      while (((np = np->right) != 0)
5721	     /* Do they jump to the same place?  */
5722	     && ((lb2 = next_real_insn (label_rtx (np->code_label))) == lb
5723		 || (lb != 0 && lb2 != 0
5724		     && simplejump_p (lb)
5725		     && simplejump_p (lb2)
5726		     && rtx_equal_p (SET_SRC (PATTERN (lb)),
5727				     SET_SRC (PATTERN (lb2)))))
5728	     /* Are their ranges consecutive?  */
5729	     && tree_int_cst_equal (np->low,
5730				    fold (build (PLUS_EXPR,
5731						 TREE_TYPE (node->high),
5732						 node->high,
5733						 integer_one_node)))
5734	     /* An overflow is not consecutive.  */
5735	     && tree_int_cst_lt (node->high,
5736				 fold (build (PLUS_EXPR,
5737					      TREE_TYPE (node->high),
5738					      node->high,
5739					      integer_one_node))))
5740	{
5741	  node->high = np->high;
5742	}
5743      /* NP is the first node after NODE which can't be grouped with it.
5744	 Delete the nodes in between, and move on to that node.  */
5745      node->right = np;
5746      node = np;
5747    }
5748}
5749
5750/* Take an ordered list of case nodes
5751   and transform them into a near optimal binary tree,
5752   on the assumption that any target code selection value is as
5753   likely as any other.
5754
5755   The transformation is performed by splitting the ordered
5756   list into two equal sections plus a pivot.  The parts are
5757   then attached to the pivot as left and right branches.  Each
5758   branch is then transformed recursively.  */
5759
5760static void
5761balance_case_nodes (head, parent)
5762     case_node_ptr *head;
5763     case_node_ptr parent;
5764{
5765  case_node_ptr np;
5766
5767  np = *head;
5768  if (np)
5769    {
5770      int cost = 0;
5771      int i = 0;
5772      int ranges = 0;
5773      case_node_ptr *npp;
5774      case_node_ptr left;
5775
5776      /* Count the number of entries on this branch.  Also count the ranges.  */
5777
5778      while (np)
5779	{
5780	  if (!tree_int_cst_equal (np->low, np->high))
5781	    {
5782	      ranges++;
5783	      if (use_cost_table)
5784		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
5785	    }
5786
5787	  if (use_cost_table)
5788	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));
5789
5790	  i++;
5791	  np = np->right;
5792	}
5793
5794      if (i > 2)
5795	{
5796	  /* Split this list if it is long enough for that to help.  */
5797	  npp = head;
5798	  left = *npp;
5799	  if (use_cost_table)
5800	    {
5801	      /* Find the place in the list that bisects the list's total cost.
5802		 Here I gets half the total cost.  */
5803	      int n_moved = 0;
5804	      i = (cost + 1) / 2;
5805	      while (1)
5806		{
5807		  /* Skip nodes while their cost does not reach that amount.  */
5808		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5809		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
5810		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
5811		  if (i <= 0)
5812		    break;
5813		  npp = &(*npp)->right;
5814		  n_moved += 1;
5815		}
5816	      if (n_moved == 0)
5817		{
5818		  /* Leave this branch lopsided, but optimize left-hand
5819		     side and fill in `parent' fields for right-hand side.  */
5820		  np = *head;
5821		  np->parent = parent;
5822		  balance_case_nodes (&np->left, np);
5823		  for (; np->right; np = np->right)
5824		    np->right->parent = np;
5825		  return;
5826		}
5827	    }
5828	  /* If there are just three nodes, split at the middle one.  */
5829	  else if (i == 3)
5830	    npp = &(*npp)->right;
5831	  else
5832	    {
5833	      /* Find the place in the list that bisects the list's total cost,
5834		 where ranges count as 2.
5835		 Here I gets half the total cost.  */
5836	      i = (i + ranges + 1) / 2;
5837	      while (1)
5838		{
5839		  /* Skip nodes while their cost does not reach that amount.  */
5840		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
5841		    i--;
5842		  i--;
5843		  if (i <= 0)
5844		    break;
5845		  npp = &(*npp)->right;
5846		}
5847	    }
5848	  *head = np = *npp;
5849	  *npp = 0;
5850	  np->parent = parent;
5851	  np->left = left;
5852
5853	  /* Optimize each of the two split parts.  */
5854	  balance_case_nodes (&np->left, np);
5855	  balance_case_nodes (&np->right, np);
5856	}
5857      else
5858	{
5859	  /* Else leave this branch as one level,
5860	     but fill in `parent' fields.  */
5861	  np = *head;
5862	  np->parent = parent;
5863	  for (; np->right; np = np->right)
5864	    np->right->parent = np;
5865	}
5866    }
5867}
5868
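/* For example (illustrative, ignoring the cost table): the ordered
   list 1 2 3 4 5 6 7 is split at 4, whose left branch is then built
   from 1 2 3 and whose right branch from 5 6 7; recursing likewise
   makes 2 and 6 the children of 4, yielding a tree of depth three.  */
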
5869/* Search the parent sections of the case node tree
5870   to see if a test for the lower bound of NODE would be redundant.
5871   INDEX_TYPE is the type of the index expression.
5872
5873   The instructions to generate the case decision tree are
5874   output in the same order as nodes are processed, so it is
5875   known that if a parent node checks the range of the current
5876   node minus one, the current node is bounded at its lower
5877   span.  Thus the test would be redundant.  */
5878
5879static int
5880node_has_low_bound (node, index_type)
5881     case_node_ptr node;
5882     tree index_type;
5883{
5884  tree low_minus_one;
5885  case_node_ptr pnode;
5886
5887  /* If the lower bound of this node is the lowest value in the index type,
5888     we need not test it.  */
5889
5890  if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
5891    return 1;
5892
5893  /* If this node has a left branch, the value at the left must be less
5894     than that at this node, so it cannot be bounded at the bottom and
5895     we need not bother testing any further.  */
5896
5897  if (node->left)
5898    return 0;
5899
5900  low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
5901			       node->low, integer_one_node));
5902
5903  /* If the subtraction above overflowed, we can't verify anything.
5904     Otherwise, look for a parent that tests our value - 1.  */
5905
5906  if (! tree_int_cst_lt (low_minus_one, node->low))
5907    return 0;
5908
5909  for (pnode = node->parent; pnode; pnode = pnode->parent)
5910    if (tree_int_cst_equal (low_minus_one, pnode->high))
5911      return 1;
5912
5913  return 0;
5914}
5915
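/* Illustration: if a parent of the node for `case 5:' already tested
   a range ending at 4 before branching here, LOW_MINUS_ONE is 4, the
   walk up the `parent' links finds that parent, and the `index >= 5'
   test can be omitted.  */
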
5916/* Search the parent sections of the case node tree
5917   to see if a test for the upper bound of NODE would be redundant.
5918   INDEX_TYPE is the type of the index expression.
5919
5920   The instructions to generate the case decision tree are
5921   output in the same order as nodes are processed, so it is
5922   known that if a parent node checks the range of the current
5923   node plus one, the current node is bounded at its upper
5924   span.  Thus the test would be redundant.  */
5925
5926static int
5927node_has_high_bound (node, index_type)
5928     case_node_ptr node;
5929     tree index_type;
5930{
5931  tree high_plus_one;
5932  case_node_ptr pnode;
5933
5934  /* If there is no upper bound, obviously no test is needed.  */
5935
5936  if (TYPE_MAX_VALUE (index_type) == NULL)
5937    return 1;
5938
5939  /* If the upper bound of this node is the highest value in the type
5940     of the index expression, we need not test against it.  */
5941
5942  if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
5943    return 1;
5944
5945  /* If this node has a right branch, the value at the right must be greater
5946     than that at this node, so it cannot be bounded at the top and
5947     we need not bother testing any further.  */
5948
5949  if (node->right)
5950    return 0;
5951
5952  high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
5953			       node->high, integer_one_node));
5954
5955  /* If the addition above overflowed, we can't verify anything.
5956     Otherwise, look for a parent that tests our value + 1.  */
5957
5958  if (! tree_int_cst_lt (node->high, high_plus_one))
5959    return 0;
5960
5961  for (pnode = node->parent; pnode; pnode = pnode->parent)
5962    if (tree_int_cst_equal (high_plus_one, pnode->low))
5963      return 1;
5964
5965  return 0;
5966}
5967
5968/* Search the parent sections of the
5969   case node tree to see if both tests for the upper and lower
5970   bounds of NODE would be redundant.  */
5971
5972static int
5973node_is_bounded (node, index_type)
5974     case_node_ptr node;
5975     tree index_type;
5976{
5977  return (node_has_low_bound (node, index_type)
5978	  && node_has_high_bound (node, index_type));
5979}
5980
5981/* Emit an unconditional jump to LABEL unless it would be dead code.  */
5982
5983static void
5984emit_jump_if_reachable (label)
5985     rtx label;
5986{
5987  if (GET_CODE (get_last_insn ()) != BARRIER)
5988    emit_jump (label);
5989}
5990
5991/* Emit step-by-step code to select a case for the value of INDEX.
5992   The thus generated decision tree follows the form of the
5993   case-node binary tree NODE, whose nodes represent test conditions.
5994   INDEX_TYPE is the type of the index of the switch.
5995
5996   Care is taken to prune redundant tests from the decision tree
5997   by detecting any boundary conditions already checked by
5998   emitted rtx.  (See node_has_high_bound, node_has_low_bound
5999   and node_is_bounded, above.)
6000
6001   Where the test conditions can be shown to be redundant we emit
6002   an unconditional jump to the target code.  As a further
6003   optimization, the subordinates of a tree node are examined to
6004   check for bounded nodes.  In this case conditional and/or
6005   unconditional jumps as a result of the boundary check for the
6006   current node are arranged to target the subordinates' associated
6007   code for out-of-bound conditions on the current node.
6008
6009   We can assume that when control reaches the code generated here,
6010   the index value has already been compared with the parents
6011   of this node, and determined to be on the same side of each parent
6012   as this node is.  Thus, if this node tests for the value 51,
6013   and a parent tested for 52, we don't need to consider
6014   the possibility of a value greater than 51.  If another parent
6015   tests for the value 50, then this node need not test anything.  */
6016
6017static void
6018emit_case_nodes (index, node, default_label, index_type)
6019     rtx index;
6020     case_node_ptr node;
6021     rtx default_label;
6022     tree index_type;
6023{
6024  /* If INDEX has an unsigned type, we must make unsigned branches.  */
6025  int unsignedp = TREE_UNSIGNED (index_type);
6026  enum machine_mode mode = GET_MODE (index);
6027  enum machine_mode imode = TYPE_MODE (index_type);
6028
6029  /* See if our parents have already tested everything for us.
6030     If they have, emit an unconditional jump for this node.  */
6031  if (node_is_bounded (node, index_type))
6032    emit_jump (label_rtx (node->code_label));
6033
6034  else if (tree_int_cst_equal (node->low, node->high))
6035    {
6036      /* Node is single valued.  First see if the index expression matches
6037	 this node and then check our children, if any.  */
6038
6039      do_jump_if_equal (index,
6040			convert_modes (mode, imode,
6041				       expand_expr (node->low, NULL_RTX,
6042						    VOIDmode, 0),
6043				       unsignedp),
6044			label_rtx (node->code_label), unsignedp);
6045
6046      if (node->right != 0 && node->left != 0)
6047	{
6048	  /* This node has children on both sides.
6049	     Dispatch to one side or the other
6050	     by comparing the index value with this node's value.
6051	     If one subtree is bounded, check that one first,
6052	     so we can avoid real branches in the tree.  */
6053
6054	  if (node_is_bounded (node->right, index_type))
6055	    {
6056	      emit_cmp_and_jump_insns (index,
6057				       convert_modes
6058				       (mode, imode,
6059					expand_expr (node->high, NULL_RTX,
6060						     VOIDmode, 0),
6061					unsignedp),
6062				       GT, NULL_RTX, mode, unsignedp,
6063				       label_rtx (node->right->code_label));
6064	      emit_case_nodes (index, node->left, default_label, index_type);
6065	    }
6066
6067	  else if (node_is_bounded (node->left, index_type))
6068	    {
6069	      emit_cmp_and_jump_insns (index,
6070				       convert_modes
6071				       (mode, imode,
6072					expand_expr (node->high, NULL_RTX,
6073						     VOIDmode, 0),
6074					unsignedp),
6075				       LT, NULL_RTX, mode, unsignedp,
6076				       label_rtx (node->left->code_label));
6077	      emit_case_nodes (index, node->right, default_label, index_type);
6078	    }
6079
6080	  else
6081	    {
6082	      /* Neither node is bounded.  First distinguish the two sides;
6083		 then emit the code for one side at a time.  */
6084
6085	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6086
6087	      /* See if the value is on the right.  */
6088	      emit_cmp_and_jump_insns (index,
6089				       convert_modes
6090				       (mode, imode,
6091					expand_expr (node->high, NULL_RTX,
6092						     VOIDmode, 0),
6093					unsignedp),
6094				       GT, NULL_RTX, mode, unsignedp,
6095				       label_rtx (test_label));
6096
6097	      /* Value must be on the left.
6098		 Handle the left-hand subtree.  */
6099	      emit_case_nodes (index, node->left, default_label, index_type);
6100	      /* If left-hand subtree does nothing,
6101		 go to default.  */
6102	      emit_jump_if_reachable (default_label);
6103
6104	      /* Code branches here for the right-hand subtree.  */
6105	      expand_label (test_label);
6106	      emit_case_nodes (index, node->right, default_label, index_type);
6107	    }
6108	}
6109
6110      else if (node->right != 0 && node->left == 0)
6111	{
6112	  /* Here we have a right child but no left, so we issue a conditional
6113	     branch to default and process the right child.
6114
6115	     Omit the conditional branch to default if all it would avoid is one
6116	     right child; it costs too much space to save so little time.  */
6117
6118	  if (node->right->right || node->right->left
6119	      || !tree_int_cst_equal (node->right->low, node->right->high))
6120	    {
6121	      if (!node_has_low_bound (node, index_type))
6122		{
6123		  emit_cmp_and_jump_insns (index,
6124					   convert_modes
6125					   (mode, imode,
6126					    expand_expr (node->high, NULL_RTX,
6127							 VOIDmode, 0),
6128					    unsignedp),
6129					   LT, NULL_RTX, mode, unsignedp,
6130					   default_label);
6131		}
6132
6133	      emit_case_nodes (index, node->right, default_label, index_type);
6134	    }
6135	  else
6136	    /* We cannot process node->right normally
6137	       since we haven't ruled out the numbers less than
6138	       this node's value.  So handle node->right explicitly.  */
6139	    do_jump_if_equal (index,
6140			      convert_modes
6141			      (mode, imode,
6142			       expand_expr (node->right->low, NULL_RTX,
6143					    VOIDmode, 0),
6144			       unsignedp),
6145			      label_rtx (node->right->code_label), unsignedp);
6146	}
6147
6148      else if (node->right == 0 && node->left != 0)
6149	{
6150	  /* Just one subtree, on the left.  */
6151	  if (node->left->left || node->left->right
6152	      || !tree_int_cst_equal (node->left->low, node->left->high))
6153	    {
6154	      if (!node_has_high_bound (node, index_type))
6155		{
6156		  emit_cmp_and_jump_insns (index,
6157					   convert_modes
6158					   (mode, imode,
6159					    expand_expr (node->high, NULL_RTX,
6160							 VOIDmode, 0),
6161					    unsignedp),
6162					   GT, NULL_RTX, mode, unsignedp,
6163					   default_label);
6164		}
6165
6166	      emit_case_nodes (index, node->left, default_label, index_type);
6167	    }
6168	  else
6169	    /* We cannot process node->left normally
6170	       since we haven't ruled out the numbers greater than
6171	       this node's value.  So handle node->left explicitly.  */
6172	    do_jump_if_equal (index,
6173			      convert_modes
6174			      (mode, imode,
6175			       expand_expr (node->left->low, NULL_RTX,
6176					    VOIDmode, 0),
6177			       unsignedp),
6178			      label_rtx (node->left->code_label), unsignedp);
6179	}
6180    }
6181  else
6182    {
6183      /* Node is a range.  These cases are very similar to those for a single
6184	 value, except that we do not start by testing whether this node
6185	 is the one to branch to.  */
6186
6187      if (node->right != 0 && node->left != 0)
6188	{
6189	  /* Node has subtrees on both sides.
6190	     If the right-hand subtree is bounded,
6191	     test for it first, since we can go straight there.
6192	     Otherwise, we need to make a branch in the control structure,
6193	     then handle the two subtrees.  */
6194	  tree test_label = 0;
6195
6196	  if (node_is_bounded (node->right, index_type))
6197	    /* Right hand node is fully bounded so we can eliminate any
6198	       testing and branch directly to the target code.  */
6199	    emit_cmp_and_jump_insns (index,
6200				     convert_modes
6201				     (mode, imode,
6202				      expand_expr (node->high, NULL_RTX,
6203						   VOIDmode, 0),
6204				      unsignedp),
6205				     GT, NULL_RTX, mode, unsignedp,
6206				     label_rtx (node->right->code_label));
6207	  else
6208	    {
6209	      /* Right hand node requires testing.
6210		 Branch to a label where we will handle it later.  */
6211
6212	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
6213	      emit_cmp_and_jump_insns (index,
6214				       convert_modes
6215				       (mode, imode,
6216					expand_expr (node->high, NULL_RTX,
6217						     VOIDmode, 0),
6218					unsignedp),
6219				       GT, NULL_RTX, mode, unsignedp,
6220				       label_rtx (test_label));
6221	    }
6222
6223	  /* Value belongs to this node or to the left-hand subtree.  */
6224
6225	  emit_cmp_and_jump_insns (index,
6226				   convert_modes
6227				   (mode, imode,
6228				    expand_expr (node->low, NULL_RTX,
6229						 VOIDmode, 0),
6230				    unsignedp),
6231				   GE, NULL_RTX, mode, unsignedp,
6232				   label_rtx (node->code_label));
6233
6234	  /* Handle the left-hand subtree.  */
6235	  emit_case_nodes (index, node->left, default_label, index_type);
6236
6237	  /* If right node had to be handled later, do that now.  */
6238
6239	  if (test_label)
6240	    {
6241	      /* If the left-hand subtree fell through,
6242		 don't let it fall into the right-hand subtree.  */
6243	      emit_jump_if_reachable (default_label);
6244
6245	      expand_label (test_label);
6246	      emit_case_nodes (index, node->right, default_label, index_type);
6247	    }
6248	}
6249
6250      else if (node->right != 0 && node->left == 0)
6251	{
6252	  /* Deal with values to the left of this node,
6253	     if they are possible.  */
6254	  if (!node_has_low_bound (node, index_type))
6255	    {
6256	      emit_cmp_and_jump_insns (index,
6257				       convert_modes
6258				       (mode, imode,
6259					expand_expr (node->low, NULL_RTX,
6260						     VOIDmode, 0),
6261					unsignedp),
6262				       LT, NULL_RTX, mode, unsignedp,
6263				       default_label);
6264	    }
6265
6266	  /* Value belongs to this node or to the right-hand subtree.  */
6267
6268	  emit_cmp_and_jump_insns (index,
6269				   convert_modes
6270				   (mode, imode,
6271				    expand_expr (node->high, NULL_RTX,
6272						 VOIDmode, 0),
6273				    unsignedp),
6274				   LE, NULL_RTX, mode, unsignedp,
6275				   label_rtx (node->code_label));
6276
6277	  emit_case_nodes (index, node->right, default_label, index_type);
6278	}
6279
6280      else if (node->right == 0 && node->left != 0)
6281	{
6282	  /* Deal with values to the right of this node,
6283	     if they are possible.  */
6284	  if (!node_has_high_bound (node, index_type))
6285	    {
6286	      emit_cmp_and_jump_insns (index,
6287				       convert_modes
6288				       (mode, imode,
6289					expand_expr (node->high, NULL_RTX,
6290						     VOIDmode, 0),
6291					unsignedp),
6292				       GT, NULL_RTX, mode, unsignedp,
6293				       default_label);
6294	    }
6295
6296	  /* Value belongs to this node or to the left-hand subtree.  */
6297
6298	  emit_cmp_and_jump_insns (index,
6299				   convert_modes
6300				   (mode, imode,
6301				    expand_expr (node->low, NULL_RTX,
6302						 VOIDmode, 0),
6303				    unsignedp),
6304				   GE, NULL_RTX, mode, unsignedp,
6305				   label_rtx (node->code_label));
6306
6307	  emit_case_nodes (index, node->left, default_label, index_type);
6308	}
6309
6310      else
6311	{
6312	  /* Node has no children so we check low and high bounds to remove
6313	     redundant tests.  Only one of the bounds can exist,
6314	     since otherwise this node is bounded--a case tested already.  */
6315	  int high_bound = node_has_high_bound (node, index_type);
6316	  int low_bound = node_has_low_bound (node, index_type);
6317
6318	  if (!high_bound && low_bound)
6319	    {
6320	      emit_cmp_and_jump_insns (index,
6321				       convert_modes
6322				       (mode, imode,
6323					expand_expr (node->high, NULL_RTX,
6324						     VOIDmode, 0),
6325					unsignedp),
6326				       GT, NULL_RTX, mode, unsignedp,
6327				       default_label);
6328	    }
6329
6330	  else if (!low_bound && high_bound)
6331	    {
6332	      emit_cmp_and_jump_insns (index,
6333				       convert_modes
6334				       (mode, imode,
6335					expand_expr (node->low, NULL_RTX,
6336						     VOIDmode, 0),
6337					unsignedp),
6338				       LT, NULL_RTX, mode, unsignedp,
6339				       default_label);
6340	    }
6341	  else if (!low_bound && !high_bound)
6342	    {
6343	      /* Widen LOW and HIGH to the same width as INDEX.  */
6344	      tree type = type_for_mode (mode, unsignedp);
6345	      tree low = build1 (CONVERT_EXPR, type, node->low);
6346	      tree high = build1 (CONVERT_EXPR, type, node->high);
6347	      rtx low_rtx, new_index, new_bound;
6348
6349	      /* Instead of doing two branches, emit one unsigned branch for
6350		 (index-low) > (high-low).  */
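	      /* Illustration: for the range 5 .. 10, an index of 7
		 yields (7 - 5) = 2 <= (10 - 5), falling through to
		 the case label, while an index of 3 wraps around to
		 a huge unsigned value for (3 - 5) and so branches to
		 the default label.  */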
6351	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
6352	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
6353					       NULL_RTX, unsignedp,
6354					       OPTAB_WIDEN);
6355	      new_bound = expand_expr (fold (build (MINUS_EXPR, type,
6356						    high, low)),
6357				       NULL_RTX, mode, 0);
6358
6359	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
6360				       mode, 1, default_label);
6361	    }
6362
6363	  emit_jump (label_rtx (node->code_label));
6364	}
6365    }
6366}
6367
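/* A sketch of the output (illustrative C-like pseudo-code, not
   literal RTL): for `case 1: case 5: case 9:' balanced with 5 at the
   root, the emitted decision tree is roughly

	if (index == 5) goto L5;
	if (index > 5) goto Ltest;
	if (index == 1) goto L1;
	goto Ldefault;
     Ltest:
	if (index == 9) goto L9;
	goto Ldefault;

   where L1, L5 and L9 stand for the code of the case labels.  */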